diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml index 8a9c153da4e0d..32b0a12f06a0e 100644 --- a/.buildkite/pipelines/intake.template.yml +++ b/.buildkite/pipelines/intake.template.yml @@ -14,7 +14,7 @@ steps: agents: provider: gcp image: family/elasticsearch-ubuntu-2004 - machineType: custom-32-98304 + machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part2 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart2 @@ -22,7 +22,7 @@ steps: agents: provider: gcp image: family/elasticsearch-ubuntu-2004 - machineType: custom-32-98304 + machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part3 command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart3 @@ -30,7 +30,7 @@ steps: agents: provider: gcp image: family/elasticsearch-ubuntu-2004 - machineType: custom-32-98304 + machineType: n1-standard-32 buildDirectory: /dev/shm/bk - group: bwc-snapshots steps: diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 08ba9529eb882..fab36deb6124a 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -54,7 +54,7 @@ steps: agents: provider: gcp image: family/elasticsearch-ubuntu-2004 - machineType: custom-32-98304 + machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" @@ -82,7 +82,7 @@ steps: agents: provider: gcp image: family/elasticsearch-ubuntu-2004 - machineType: custom-32-98304 + machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: ES_RUNTIME_JAVA: "{{matrix.ES_RUNTIME_JAVA}}" @@ -180,3 +180,14 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk + if: build.branch == "main" 
|| build.branch =~ /^[0-9]+\.[0-9]+\$/ + - label: Check branch consistency + command: .ci/scripts/run-gradle.sh branchConsistency + timeout_in_minutes: 15 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-2 + - label: Check branch protection rules + command: .buildkite/scripts/branch-protection.sh + timeout_in_minutes: 5 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 6f417e8f8ca84..174a8a3b8c3ec 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -1241,3 +1241,14 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: n2-standard-8 buildDirectory: /dev/shm/bk + if: build.branch == "main" || build.branch =~ /^[0-9]+\.[0-9]+\$/ + - label: Check branch consistency + command: .ci/scripts/run-gradle.sh branchConsistency + timeout_in_minutes: 15 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-2 + - label: Check branch protection rules + command: .buildkite/scripts/branch-protection.sh + timeout_in_minutes: 5 diff --git a/.buildkite/scripts/branch-protection.sh b/.buildkite/scripts/branch-protection.sh new file mode 100755 index 0000000000000..6c1c46ed8f09e --- /dev/null +++ b/.buildkite/scripts/branch-protection.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -euo pipefail + +STATUS=$(curl -s "https://api.github.com/repos/elastic/elasticsearch/branches/$BUILDKITE_BRANCH" | jq '.protected') +echo "Branch $BUILDKITE_BRANCH protection status is: $STATUS" +if [[ "$STATUS" == "false" ]]; then + echo "Development branch $BUILDKITE_BRANCH is not set as protected in GitHub but should be." 
+ exit 1 +fi diff --git a/.ci/jobs.t/elastic+elasticsearch+branch-consistency.yml b/.ci/jobs.t/elastic+elasticsearch+branch-consistency.yml index bb96970010358..5a35727b3df65 100644 --- a/.ci/jobs.t/elastic+elasticsearch+branch-consistency.yml +++ b/.ci/jobs.t/elastic+elasticsearch+branch-consistency.yml @@ -2,12 +2,12 @@ - job: name: elastic+elasticsearch+%BRANCH%+branch-consistency display-name: "elastic / elasticsearch # %BRANCH% - branch consistency" - description: Testing of the Elasticsearch %BRANCH% branch version consistency. - triggers: - - timed: "H 7 * * *" + description: "This job has been migrated to Buildkite.\n" + disabled: true + triggers: [] builders: - inject: - properties-file: '.ci/java-versions.properties' + properties-file: ".ci/java-versions.properties" properties-content: | JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA - shell: | diff --git a/.ci/jobs.t/elastic+elasticsearch+branch-protection.yml b/.ci/jobs.t/elastic+elasticsearch+branch-protection.yml index 30e4c1c20dd43..3e6a88ac93596 100644 --- a/.ci/jobs.t/elastic+elasticsearch+branch-protection.yml +++ b/.ci/jobs.t/elastic+elasticsearch+branch-protection.yml @@ -2,10 +2,10 @@ - job: name: elastic+elasticsearch+%BRANCH%+branch-protection display-name: "elastic / elasticsearch # %BRANCH% - branch protection" - description: Elasticsearch %BRANCH% branch protection. + description: "This job has been migrated to Buildkite.\n" + disabled: true node: master - triggers: - - timed: "H 7 * * *" + triggers: [] scm: [] parameters: [] builders: diff --git a/.ci/scripts/run-gradle.sh b/.ci/scripts/run-gradle.sh index ea68ab7718579..af5db8a6b4063 100755 --- a/.ci/scripts/run-gradle.sh +++ b/.ci/scripts/run-gradle.sh @@ -31,5 +31,10 @@ if ! 
uname -a | grep -q MING; then export GLIBC_VERSION=$(ldd --version | grep '^ldd' | sed 's/.* \([1-9]\.[0-9]*\).*/\1/') fi +# Running on 2-core machines without ramdisk can make this value be 0 +if [[ "$MAX_WORKERS" == "0" ]]; then + MAX_WORKERS=1 +fi + set -e $GRADLEW -S --max-workers=$MAX_WORKERS $@ diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-complete.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-complete.gradle index 076c891671992..1a0afe6d7d344 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-complete.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-complete.gradle @@ -12,7 +12,9 @@ import java.nio.file.Files String buildNumber = System.getenv('BUILD_NUMBER') ?: System.getenv('BUILDKITE_BUILD_NUMBER') String performanceTest = System.getenv('BUILD_PERFORMANCE_TEST') -if (buildNumber && performanceTest == null && GradleUtils.isIncludedBuild(project) == false) { +Boolean isNested = System.getProperty("scan.tag.NESTED") != null + +if (buildNumber && performanceTest == null && GradleUtils.isIncludedBuild(project) == false && isNested == false) { def uploadFilePath = "build/${buildNumber}.tar.bz2" File uploadFile = file(uploadFilePath) project.gradle.buildFinished { result -> diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index e6bbaeb19e495..0f56dd2ef8992 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -101,8 +101,10 @@ buildScan { def jobName = (System.getenv('BUILDKITE_LABEL') ?: '').replaceAll(/[^a-zA-Z0-9_\-]+/, ' ').trim().replaceAll(' ', '_').toLowerCase() tag 'CI' - link 'CI Build', buildKiteUrl + link 'CI Build', "${buildKiteUrl}#${System.getenv('BUILDKITE_JOB_ID')}" value 'Job Number', System.getenv('BUILDKITE_BUILD_NUMBER') + value 'Build ID', 
System.getenv('BUILDKITE_BUILD_ID') + value 'Job ID', System.getenv('BUILDKITE_JOB_ID') value 'Pipeline', System.getenv('BUILDKITE_PIPELINE_SLUG') tag System.getenv('BUILDKITE_PIPELINE_SLUG') @@ -129,19 +131,30 @@ buildScan { link 'Source', "https://github.com/${repository}/tree/${BuildParams.gitRevision}" } - buildScanPublished { scan -> - // Attach build scan link as build metadata - // See: https://buildkite.com/docs/pipelines/build-meta-data - new ProcessBuilder('buildkite-agent', 'meta-data', 'set', "build-scan-${System.getenv('BUILDKITE_JOB_ID')}", "${scan.buildScanUri}") - .start() - .waitFor() - - // Add a build annotation - // See: https://buildkite.com/docs/agent/v3/cli-annotate - def body = """
${System.getenv('BUILDKITE_LABEL')} :gradle: build ran: gradle ${gradle.startParameter.taskNames.join(' ')}
""" - new ProcessBuilder('buildkite-agent', 'annotate', '--context', 'gradle-build-scans', '--append', '--style', 'info', body) - .start() - .waitFor() + buildFinished { result -> + buildScanPublished { scan -> + // Attach build scan link as build metadata + // See: https://buildkite.com/docs/pipelines/build-meta-data + new ProcessBuilder('buildkite-agent', 'meta-data', 'set', "build-scan-${System.getenv('BUILDKITE_JOB_ID')}", "${scan.buildScanUri}") + .start() + .waitFor() + + // Add a build annotation + // See: https://buildkite.com/docs/agent/v3/cli-annotate + def body = """
${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failure ? 'failed' : 'successful'} build: gradle ${gradle.startParameter.taskNames.join(' ')}
""" + new ProcessBuilder( + 'buildkite-agent', + 'annotate', + '--context', + result.failure ? 'gradle-build-scans-failed' : 'gradle-build-scans', + '--append', + '--style', + result.failure ? 'error' : 'info', + body + ) + .start() + .waitFor() + } } } else { tag 'LOCAL' diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ExportElasticsearchBuildResourcesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ExportElasticsearchBuildResourcesTask.java index 56ff470b8d3d1..64b9203f8dc4b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ExportElasticsearchBuildResourcesTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ExportElasticsearchBuildResourcesTask.java @@ -40,7 +40,7 @@ */ public class ExportElasticsearchBuildResourcesTask extends DefaultTask { - private final Logger logger = Logging.getLogger(ExportElasticsearchBuildResourcesTask.class); + private static final Logger logger = Logging.getLogger(ExportElasticsearchBuildResourcesTask.class); private final Set resources = new HashSet<>(); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerResult.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerResult.java new file mode 100644 index 0000000000000..d101c0046f926 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerResult.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.docker; + +import java.util.Objects; + +/** + * This class models the result of running a command. It captures the exit code, standard output and standard error and allows + * applying String filter for stdout as this is intended to create configuration cache compatible output which + * aims to be agnostic. + */ +public class DockerResult { + + private int exitCode; + private String stdout; + private String stderr; + + public DockerResult(int exitCode, String stdout, String stderr) { + this.exitCode = exitCode; + this.stdout = stdout; + this.stderr = stderr; + } + + public int getExitCode() { + return exitCode; + } + + public String getStdout() { + return stdout; + } + + public String getStderr() { + return stderr; + } + + public void setExitCode(int exitCode) { + this.exitCode = exitCode; + } + + public void setStdout(String stdout) { + this.stdout = stdout; + } + + public void setStderr(String stderr) { + this.stderr = stderr; + } + + public boolean isSuccess() { + return exitCode == 0; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DockerResult that = (DockerResult) o; + return exitCode == that.exitCode && Objects.equals(stdout, that.stdout) && Objects.equals(stderr, that.stderr); + } + + @Override + public int hashCode() { + return Objects.hash(exitCode, stdout, stderr); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java index 2f702b3401309..84728d031c40b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java @@ -14,12 +14,10 @@ import org.gradle.api.GradleException; import 
org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; import org.gradle.api.services.BuildServiceParameters; -import org.gradle.process.ExecOperations; -import org.gradle.process.ExecResult; -import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.nio.file.Files; @@ -56,12 +54,12 @@ public abstract class DockerSupportService implements BuildService ""); - // If docker all checks out, see if docker-compose is available and working Optional composePath = getDockerComposePath(); if (lastResult.isSuccess() && composePath.isPresent()) { isComposeAvailable = runCommand(composePath.get(), "version").isSuccess(); @@ -109,9 +103,12 @@ public DockerAvailability getDockerAvailability() { // Now let's check if buildx is available and what supported platforms exist if (lastResult.isSuccess()) { - Result buildxResult = runCommand(dockerPath, "buildx", "inspect", "--bootstrap"); + DockerResult buildxResult = runCommand( + Arrays.asList(dockerPath, "buildx", "inspect", "--bootstrap"), + input -> input.lines().filter(l -> l.startsWith("Platforms:")).collect(Collectors.joining("\n")) + ); if (buildxResult.isSuccess()) { - supportedArchitectures = buildxResult.stdout() + supportedArchitectures = buildxResult.getStdout() .lines() .filter(l -> l.startsWith("Platforms:")) .map(l -> l.substring(10)) @@ -127,6 +124,8 @@ public DockerAvailability getDockerAvailability() { } } } + } else { + dockerPath = null; } boolean isAvailable = isVersionHighEnough && lastResult != null && lastResult.isSuccess(); @@ -146,6 +145,17 @@ public DockerAvailability getDockerAvailability() { return this.dockerAvailability; } + private DockerResult runCommand(List args, DockerValueSource.OutputFilter outputFilter) { + return providerFactory.of(DockerValueSource.class, params -> { + params.getParameters().getArgs().addAll(args); + 
params.getParameters().getOutputFilter().set(outputFilter); + }).get(); + } + + private DockerResult runCommand(String... args) { + return runCommand(Arrays.asList(args), input -> input); + } + private boolean dockerDaemonIsRunning(String lastResultOutput) { return lastResultOutput.contains("Cannot connect to the Docker daemon") == false; } @@ -198,8 +208,8 @@ void failIfDockerUnavailable(List tasks) { availability.version == null ? "" : " v" + availability.version, tasks.size() > 1 ? "s" : "", String.join("\n", tasks), - availability.lastCommand.exitCode, - availability.lastCommand.stderr.trim() + availability.lastCommand.getExitCode(), + availability.lastCommand.getStderr().trim() ); throwDockerRequiredException(message); } @@ -319,32 +329,6 @@ private void throwDockerRequiredException(final String message, Exception e) { ); } - /** - * Runs a command and captures the exit code, standard output and standard error. - * - * @param args the command and any arguments to execute - * @return a object that captures the result of running the command. If an exception occurring - * while running the command, or the process was killed after reaching the 10s timeout, - * then the exit code will be -1. - */ - private Result runCommand(String... args) { - if (args.length == 0) { - throw new IllegalArgumentException("Cannot execute with no command"); - } - - ByteArrayOutputStream stdout = new ByteArrayOutputStream(); - ByteArrayOutputStream stderr = new ByteArrayOutputStream(); - - final ExecResult execResult = execOperations.exec(spec -> { - // The redundant cast is to silence a compiler warning. - spec.setCommandLine((Object[]) args); - spec.setStandardOutput(stdout); - spec.setErrorOutput(stderr); - spec.setIgnoreExitValue(true); - }); - return new Result(execResult.getExitValue(), stdout.toString(), stderr.toString()); - } - /** * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability()}}. 
*/ @@ -377,22 +361,12 @@ public record DockerAvailability( Version version, // Information about the last command executes while probing Docker, or null. - Result lastCommand, + DockerResult lastCommand, // Supported build architectures Set supportedArchitectures ) {} - /** - * This class models the result of running a command. It captures the exit code, standard output and standard error. - */ - private record Result(int exitCode, String stdout, String stderr) { - - boolean isSuccess() { - return exitCode == 0; - } - } - interface Parameters extends BuildServiceParameters { File getExclusionsFile(); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerValueSource.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerValueSource.java new file mode 100644 index 0000000000000..d71208b624d7a --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerValueSource.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.docker; + +import org.gradle.api.provider.ListProperty; +import org.gradle.api.provider.Property; +import org.gradle.api.provider.ValueSource; +import org.gradle.api.provider.ValueSourceParameters; +import org.gradle.process.ExecOperations; +import org.gradle.process.ExecResult; + +import java.io.ByteArrayOutputStream; +import java.util.List; + +import javax.inject.Inject; + +public abstract class DockerValueSource implements ValueSource { + public interface OutputFilter { + String filter(String input); + } + + interface Parameters extends ValueSourceParameters { + ListProperty getArgs(); + + Property getOutputFilter(); + } + + @Inject + abstract protected ExecOperations getExecOperations(); + + @Override + public DockerResult obtain() { + return runCommand(getParameters().getArgs().get()); + } + + /** + * Runs a command and captures the exit code, standard output and standard error. + * + * @param args the command and any arguments to execute + * @return a object that captures the result of running the command. If an exception occurring + * while running the command, or the process was killed after reaching the 10s timeout, + * then the exit code will be -1. + */ + private DockerResult runCommand(List args) { + if (args.size() == 0) { + throw new IllegalArgumentException("Cannot execute with no command"); + } + + ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + ByteArrayOutputStream stderr = new ByteArrayOutputStream(); + + final ExecResult execResult = getExecOperations().exec(spec -> { + // The redundant cast is to silence a compiler warning. 
+ spec.setCommandLine(args); + spec.setStandardOutput(stdout); + spec.setErrorOutput(stderr); + spec.setIgnoreExitValue(true); + }); + return new DockerResult(execResult.getExitValue(), filtered(stdout.toString()), stderr.toString()); + } + + private String filtered(String input) { + return getParameters().getOutputFilter().get().filter(input); + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java index e158dd7c755c9..194d0361980ec 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java @@ -8,25 +8,568 @@ package org.elasticsearch.gradle.internal.precommit; -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis; +import de.thetaphi.forbiddenapis.Checker; +import de.thetaphi.forbiddenapis.Constants; +import de.thetaphi.forbiddenapis.Logger; +import de.thetaphi.forbiddenapis.ParseException; +import groovy.lang.Closure; +import org.gradle.api.DefaultTask; +import org.gradle.api.GradleException; +import org.gradle.api.InvalidUserDataException; +import org.gradle.api.Transformer; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileTree; +import org.gradle.api.file.FileTreeElement; +import org.gradle.api.file.ProjectLayout; +import org.gradle.api.file.RegularFileProperty; +import org.gradle.api.logging.Logging; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.provider.ListProperty; +import org.gradle.api.provider.Property; +import org.gradle.api.provider.SetProperty; +import org.gradle.api.specs.Spec; +import org.gradle.api.tasks.CacheableTask; +import org.gradle.api.tasks.CompileClasspath; import 
org.gradle.api.tasks.IgnoreEmptyDirectories; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputDirectory; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.Internal; +import org.gradle.api.tasks.Optional; +import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; +import org.gradle.api.tasks.SkipWhenEmpty; +import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.VerificationTask; +import org.gradle.api.tasks.util.PatternFilterable; +import org.gradle.api.tasks.util.PatternSet; +import org.gradle.workers.WorkAction; +import org.gradle.workers.WorkParameters; +import org.gradle.workers.WorkQueue; +import org.gradle.workers.WorkerExecutor; +import org.jetbrains.annotations.NotNull; -/** - * This implementation is used to fix gradle 8 compatibility of - * the CheckForbiddenApis task which is built with gradle 4 support - * in mind. - * */ -public class CheckForbiddenApisTask extends CheckForbiddenApis { +import java.io.File; +import java.io.IOException; +import java.lang.annotation.RetentionPolicy; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.EnumSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.Set; + +import javax.inject.Inject; + +import static de.thetaphi.forbiddenapis.Checker.Option.DISABLE_CLASSLOADING_CACHE; +import static de.thetaphi.forbiddenapis.Checker.Option.FAIL_ON_MISSING_CLASSES; +import static de.thetaphi.forbiddenapis.Checker.Option.FAIL_ON_UNRESOLVABLE_SIGNATURES; +import static de.thetaphi.forbiddenapis.Checker.Option.FAIL_ON_VIOLATION; + +@CacheableTask +public abstract class CheckForbiddenApisTask extends DefaultTask implements PatternFilterable, VerificationTask, Constants { + + 
public static final Set BUNDLED_SIGNATURE_DEFAULTS = Set.of("jdk-unsafe", "jdk-non-portable", "jdk-system-out"); + + private static final String NL = System.getProperty("line.separator", "\n"); + private final PatternSet patternSet = new PatternSet().include("**/*.class"); + private FileCollection classesDirs; + private FileCollection classpath; + private String targetCompatibility; + + private FileCollection signaturesFiles; + + private final ObjectFactory objectFactory; + private ProjectLayout projectLayout; + + private List signatures = new ArrayList<>(); + + private File resourcesDir; + + private boolean ignoreFailures = false; + + @Input + @Optional + abstract SetProperty getBundledSignatures(); + + /** + * List of a custom Java annotations (full class names) that are used in the checked + * code to suppress errors. Those annotations must have at least + * {@link RetentionPolicy#CLASS}. They can be applied to classes, their methods, + * or fields. By default, {@code @de.thetaphi.forbiddenapis.SuppressForbidden} + * can always be used, but needs the {@code forbidden-apis.jar} file in classpath + * of compiled project, which may not be wanted. + * Instead of a full class name, a glob pattern may be used (e.g., + * {@code **.SuppressForbidden}). + */ + @Input + @Optional + abstract SetProperty getSuppressAnnotations(); + + @Inject + public CheckForbiddenApisTask(ObjectFactory factory, ProjectLayout projectLayout) { + signaturesFiles = factory.fileCollection(); + this.objectFactory = factory; + this.projectLayout = projectLayout; + } + + @OutputFile + public File getSuccessMarker() { + return new File(projectLayout.getBuildDirectory().getAsFile().get(), "markers/" + this.getName()); + } + + /** + * Directories with the class files to check. + * Defaults to current sourseSet's output directory (Gradle 3) or output directories (Gradle 4.0+). 
+ */ + @Internal + public FileCollection getClassesDirs() { + return classesDirs; + } + + /** @see #getClassesDirs() */ + public void setClassesDirs(FileCollection classesDirs) { + Objects.requireNonNull(classesDirs, "classesDirs"); + this.classesDirs = classesDirs; + } + + /** Returns the pattern set to match against class files in {@link #getClassesDirs()}. */ + @Internal + public PatternSet getPatternSet() { + return patternSet; + } + + /** @see #getPatternSet() */ + public void setPatternSet(PatternSet patternSet) { + patternSet.copyFrom(patternSet); + } + + /** + * A {@link FileCollection} used to configure the classpath. + * Defaults to current sourseSet's compile classpath. + */ + @CompileClasspath + public FileCollection getClasspath() { + return classpath; + } + + /** @see #getClasspath */ + public void setClasspath(FileCollection classpath) { + Objects.requireNonNull(classpath, "classpath"); + this.classpath = classpath; + } /** - * Add additional annotation to make this input gradle 8 compliant. - * Otherwise we see a deprecation warning here starting with gradle 7.4 - * */ + * A {@link FileCollection} containing all files, which contain signatures and comments for forbidden API calls. + * The signatures are resolved against {@link #getClasspath()}. 
+ */ + @InputFiles + @Optional + @PathSensitive(PathSensitivity.RELATIVE) + public FileCollection getSignaturesFiles() { + return signaturesFiles; + } + + @InputDirectory + @PathSensitive(PathSensitivity.RELATIVE) + public File getResourcesDir() { + return resourcesDir; + } + + public void setResourcesDir(File resourcesDir) { + this.resourcesDir = resourcesDir; + } + + /** @see #getSignaturesFiles */ + public void setSignaturesFiles(FileCollection signaturesFiles) { + this.signaturesFiles = signaturesFiles; + } + + public void modifyBundledSignatures(Transformer, Set> transformer) { + getBundledSignatures().set(transformer.transform(getBundledSignatures().get())); + } + + public void replaceSignatureFiles(String... signatureFiles) { + List resources = new ArrayList<>(signatureFiles.length); + for (Object name : signatureFiles) { + resources.add(new File(resourcesDir, "forbidden/" + name + ".txt")); + } + setSignaturesFiles(objectFactory.fileCollection().from(resources)); + } + + public void addSignatureFiles(String... signatureFiles) { + List resources = new ArrayList<>(signatureFiles.length); + for (Object name : signatureFiles) { + resources.add(new File(resourcesDir, "forbidden/" + name + ".txt")); + } + setSignaturesFiles(objectFactory.fileCollection().from(getSignaturesFiles()).from(resources)); + + } + + /** + * Gives multiple API signatures that are joined with newlines and + * parsed like a single {@link #getSignaturesFiles()}. + * The signatures are resolved against {@link #getClasspath()}. + */ + @Input + @Optional + public List getSignatures() { + return signatures; + } + + /** @see #getSignatures */ + public void setSignatures(List signatures) { + this.signatures = signatures; + } + + /** + * {@inheritDoc} + *

+ * This setting is to conform with {@link VerificationTask} interface. + * Default is {@code false}. + */ @Override + @Input + public boolean getIgnoreFailures() { + return ignoreFailures; + } + + @Override + public void setIgnoreFailures(boolean ignoreFailures) { + this.ignoreFailures = ignoreFailures; + } + + /** + * The default compiler target version used to expand references to bundled JDK signatures. + * E.g., if you use "jdk-deprecated", it will expand to this version. + * This setting should be identical to the target version used in the compiler task. + * Defaults to {@code project.targetCompatibility}. + */ + @Input + @Optional + public String getTargetCompatibility() { + return targetCompatibility; + } + + /** @see #getTargetCompatibility */ + public void setTargetCompatibility(String targetCompatibility) { + this.targetCompatibility = targetCompatibility; + } + + // PatternFilterable implementation: + + /** + * {@inheritDoc} + *

+ * Set of patterns matching all class files to be parsed from the classesDirectory. + * Can be changed to e.g. exclude several files (using excludes). + * The default is a single include with pattern '**/*.class' + */ + @Override + @Internal + public Set getIncludes() { + return getPatternSet().getIncludes(); + } + + @Override + public CheckForbiddenApisTask setIncludes(Iterable includes) { + getPatternSet().setIncludes(includes); + return this; + } + + /** + * {@inheritDoc} + *

+ * Set of patterns matching class files to be excluded from checking. + */ + @Override + @Internal + public Set getExcludes() { + return getPatternSet().getExcludes(); + } + + @Override + public CheckForbiddenApisTask setExcludes(Iterable excludes) { + getPatternSet().setExcludes(excludes); + return this; + } + + @Override + public CheckForbiddenApisTask exclude(String... arg0) { + getPatternSet().exclude(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask exclude(Iterable arg0) { + getPatternSet().exclude(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask exclude(Spec arg0) { + getPatternSet().exclude(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask exclude(@SuppressWarnings("rawtypes") Closure arg0) { + getPatternSet().exclude(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask include(String... arg0) { + getPatternSet().include(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask include(Iterable arg0) { + getPatternSet().include(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask include(Spec arg0) { + getPatternSet().include(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask include(@SuppressWarnings("rawtypes") Closure arg0) { + getPatternSet().include(arg0); + return this; + } + + /** Returns the classes to check. */ + @InputFiles + @SkipWhenEmpty @IgnoreEmptyDirectories + @PathSensitive(PathSensitivity.RELATIVE) public FileTree getClassFiles() { - return super.getClassFiles(); + return getClassesDirs().getAsFileTree().matching(getPatternSet()); + } + + @Inject + public abstract WorkerExecutor getWorkerExecutor(); + + /** Executes the forbidden apis task. 
*/ + @TaskAction + public void checkForbidden() { + WorkQueue workQueue = getWorkerExecutor().noIsolation(); + workQueue.submit(ForbiddenApisCheckWorkAction.class, parameters -> { + parameters.getClasspath().setFrom(getClasspath()); + parameters.getClassDirectories().setFrom(getClassesDirs()); + parameters.getClassFiles().from(getClassFiles().getFiles()); + parameters.getSuppressAnnotations().set(getSuppressAnnotations()); + parameters.getBundledSignatures().set(getBundledSignatures()); + parameters.getSignatures().set(getSignatures()); + parameters.getTargetCompatibility().set(getTargetCompatibility()); + parameters.getIgnoreFailures().set(getIgnoreFailures()); + parameters.getSuccessMarker().set(getSuccessMarker()); + }); + } + + abstract static class ForbiddenApisCheckWorkAction implements WorkAction { + + private final org.gradle.api.logging.Logger logger = Logging.getLogger(getClass()); + + @Inject + public ForbiddenApisCheckWorkAction() {} + + private boolean checkIsUnsupportedJDK(Checker checker) { + if (checker.isSupportedJDK == false) { + final String msg = String.format( + Locale.ENGLISH, + "Your Java runtime (%s %s) is not supported by the forbiddenapis plugin. 
Please run the checks with a supported JDK!", + System.getProperty("java.runtime.name"), + System.getProperty("java.runtime.version") + ); + logger.warn(msg); + return true; + } + return false; + } + + @Override + public void execute() { + + final URLClassLoader urlLoader = createClassLoader(getParameters().getClasspath(), getParameters().getClassDirectories()); + try { + final Checker checker = createChecker(urlLoader); + if (checkIsUnsupportedJDK(checker)) { + return; + } + + final Set suppressAnnotations = getParameters().getSuppressAnnotations().get(); + for (String a : suppressAnnotations) { + checker.addSuppressAnnotation(a); + } + + try { + final Set bundledSignatures = getParameters().getBundledSignatures().get(); + if (bundledSignatures.isEmpty() == false) { + final String bundledSigsJavaVersion = getParameters().getTargetCompatibility().get(); + if (bundledSigsJavaVersion == null) { + logger.warn( + "The 'targetCompatibility' project or task property is missing. " + + "Trying to read bundled JDK signatures without compiler target. " + + "You have to explicitly specify the version in the resource name." 
+ ); + } + for (String bs : bundledSignatures) { + checker.addBundledSignatures(bs, bundledSigsJavaVersion); + } + } + + final FileCollection signaturesFiles = getParameters().getSignaturesFiles(); + if (signaturesFiles != null) for (final File f : signaturesFiles) { + checker.parseSignaturesFile(f); + } + final List signatures = getParameters().getSignatures().get(); + if ((signatures != null) && !signatures.isEmpty()) { + final StringBuilder sb = new StringBuilder(); + for (String line : signatures) { + sb.append(line).append(NL); + } + checker.parseSignaturesString(sb.toString()); + } + } catch (IOException ioe) { + throw new GradleException("IO problem while reading files with API signatures.", ioe); + } catch (ParseException pe) { + throw new InvalidUserDataException("Parsing signatures failed: " + pe.getMessage(), pe); + } + + if (checker.hasNoSignatures()) { + if (checker.noSignaturesFilesParsed()) { + throw new InvalidUserDataException( + "No signatures were added to task; use properties 'signatures', 'bundledSignatures', 'signaturesURLs', and/or 'signaturesFiles' to define those!" 
+ ); + } else { + logger.info("Skipping execution because no API signatures are available."); + return; + } + } + + try { + checker.addClassesToCheck(getParameters().getClassFiles()); + } catch (IOException ioe) { + throw new GradleException("Failed to load one of the given class files.", ioe); + } + checker.run(); + writeMarker(getParameters().getSuccessMarker().getAsFile().get()); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + // Close the classloader to free resources: + try { + if (urlLoader != null) urlLoader.close(); + } catch (IOException ioe) { + // getLogger().warn("Cannot close classloader: ".concat(ioe.toString())); + } + } + } + + private void writeMarker(File successMarker) throws IOException { + Files.write(successMarker.toPath(), new byte[] {}, StandardOpenOption.CREATE); + } + + private URLClassLoader createClassLoader(FileCollection classpath, FileCollection classesDirs) { + if (classesDirs == null || classpath == null) { + throw new InvalidUserDataException("Missing 'classesDirs' or 'classpath' property."); + } + + final Set cpElements = new LinkedHashSet<>(); + cpElements.addAll(classpath.getFiles()); + cpElements.addAll(classesDirs.getFiles()); + final URL[] urls = new URL[cpElements.size()]; + try { + int i = 0; + for (final File cpElement : cpElements) { + urls[i++] = cpElement.toURI().toURL(); + } + assert i == urls.length; + } catch (MalformedURLException mfue) { + throw new InvalidUserDataException("Failed to build classpath URLs.", mfue); + } + + return URLClassLoader.newInstance(urls, ClassLoader.getSystemClassLoader()); + } + + @NotNull + private Checker createChecker(URLClassLoader urlLoader) { + final EnumSet options = EnumSet.noneOf(Checker.Option.class); + options.add(FAIL_ON_MISSING_CLASSES); + if (getParameters().getIgnoreFailures().get() == false) { + options.add(FAIL_ON_VIOLATION); + } + options.add(FAIL_ON_UNRESOLVABLE_SIGNATURES); + options.add(DISABLE_CLASSLOADING_CACHE); + final Checker checker = 
new Checker(new GradleForbiddenApiLogger(logger), urlLoader, options); + return checker; + } + + private static class GradleForbiddenApiLogger implements Logger { + + private final org.gradle.api.logging.Logger delegate; + + GradleForbiddenApiLogger(org.gradle.api.logging.Logger delegate) { + this.delegate = delegate; + } + + @Override + public void error(String msg) { + delegate.error(msg); + } + + @Override + public void warn(String msg) { + delegate.warn(msg); + } + + @Override + public void info(String msg) { + delegate.info(msg); + } + + @Override + public void debug(String msg) { + delegate.debug(msg); + } + }; } + + interface Parameters extends WorkParameters { + ConfigurableFileCollection getClassDirectories(); + + ConfigurableFileCollection getClassFiles(); + + ConfigurableFileCollection getClasspath(); + + SetProperty getSuppressAnnotations(); + + RegularFileProperty getSuccessMarker(); + + ConfigurableFileCollection getSignaturesFiles(); + + SetProperty getBundledSignatures(); + + Property getTargetCompatibility(); + + Property getIgnoreFailures(); + + ListProperty getSignatures(); + + } + } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java index 71de2626d5fca..092230a2b12ea 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java @@ -88,8 +88,6 @@ public class DependencyLicensesTask extends DefaultTask { private final Logger logger = Logging.getLogger(getClass()); - private static final String SHA_EXTENSION = ".sha1"; - // TODO: we should be able to default this to eg compile deps, but we need to move the licenses // check from distribution to core (ie this should only be run on java projects) /** diff 
--git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java index 96fb11214902a..e24dd5ab2094b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java @@ -8,50 +8,37 @@ package org.elasticsearch.gradle.internal.precommit; -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApisExtension; -import groovy.lang.Closure; - import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask; import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin; import org.elasticsearch.gradle.internal.info.BuildParams; import org.gradle.api.Project; import org.gradle.api.Task; -import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.plugins.JavaBasePlugin; -import org.gradle.api.plugins.JavaPluginExtension; import org.gradle.api.specs.Specs; import org.gradle.api.tasks.SourceSetContainer; import org.gradle.api.tasks.TaskProvider; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; +import java.io.File; import java.util.Set; -import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_EXTENSION_NAME; import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME; +import static org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask.BUNDLED_SIGNATURE_DEFAULTS; public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin { + @Override public TaskProvider createTask(Project project) { project.getPluginManager().apply(JavaBasePlugin.class); - // create Extension for defaults: - var checkForbiddenApisExtension = project.getExtensions() - .create(FORBIDDEN_APIS_EXTENSION_NAME, 
CheckForbiddenApisExtension.class, project); - // Create a convenience task for all checks (this does not conflict with extension, as it has higher priority in DSL): var forbiddenTask = project.getTasks() .register(FORBIDDEN_APIS_TASK_NAME, task -> { task.setDescription("Runs forbidden-apis checks."); }); - JavaPluginExtension javaPluginExtension = project.getExtensions().getByType(JavaPluginExtension.class); - // Define our tasks (one for each SourceSet): - TaskProvider resourcesTask = project.getTasks() .register("forbiddenApisResources", ExportElasticsearchBuildResourcesTask.class); - Path resourcesDir = project.getBuildDir().toPath().resolve("forbidden-apis-config"); + File resourcesDir = project.getLayout().getBuildDirectory().dir("forbidden-apis-config").get().getAsFile(); resourcesTask.configure(t -> { - t.setOutputDir(resourcesDir.toFile()); + t.setOutputDir(resourcesDir); t.copy("forbidden/jdk-signatures.txt"); t.copy("forbidden/jdk-deprecated.txt"); t.copy("forbidden/es-all-signatures.txt"); @@ -65,60 +52,36 @@ public TaskProvider createTask(Project project) { String sourceSetTaskName = sourceSet.getTaskName(FORBIDDEN_APIS_TASK_NAME, null); var sourceSetTask = project.getTasks().register(sourceSetTaskName, CheckForbiddenApisTask.class, t -> { t.setDescription("Runs forbidden-apis checks on '${sourceSet.name}' classes."); + t.setResourcesDir(resourcesDir); t.getOutputs().upToDateWhen(Specs.SATISFIES_ALL); t.setClassesDirs(sourceSet.getOutput().getClassesDirs()); t.dependsOn(resourcesTask); - t.setClasspath(sourceSet.getRuntimeClasspath().plus(sourceSet.getCompileClasspath()).plus(sourceSet.getOutput())); + t.setClasspath(sourceSet.getRuntimeClasspath().plus(sourceSet.getCompileClasspath())); t.setTargetCompatibility(BuildParams.getMinimumRuntimeVersion().getMajorVersion()); - t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-non-portable", "jdk-system-out")); + t.getBundledSignatures().set(BUNDLED_SIGNATURE_DEFAULTS); t.setSignaturesFiles( project.files( - 
resourcesDir.resolve("forbidden/jdk-signatures.txt"), - resourcesDir.resolve("forbidden/es-all-signatures.txt"), - resourcesDir.resolve("forbidden/jdk-deprecated.txt") + resourcesDir.toPath().resolve("forbidden/jdk-signatures.txt"), + resourcesDir.toPath().resolve("forbidden/es-all-signatures.txt"), + resourcesDir.toPath().resolve("forbidden/jdk-deprecated.txt") ) ); - t.setSuppressAnnotations(Set.of("**.SuppressForbidden")); + t.getSuppressAnnotations().set(Set.of("**.SuppressForbidden")); if (t.getName().endsWith("Test")) { t.setSignaturesFiles( t.getSignaturesFiles() .plus( project.files( - resourcesDir.resolve("forbidden/es-test-signatures.txt"), - resourcesDir.resolve("forbidden/http-signatures.txt") + resourcesDir.toPath().resolve("forbidden/es-test-signatures.txt"), + resourcesDir.toPath().resolve("forbidden/http-signatures.txt") ) ) ); } else { t.setSignaturesFiles( - t.getSignaturesFiles().plus(project.files(resourcesDir.resolve("forbidden/es-server-signatures.txt"))) + t.getSignaturesFiles().plus(project.files(resourcesDir.toPath().resolve("forbidden/es-server-signatures.txt"))) ); } - ExtraPropertiesExtension ext = t.getExtensions().getExtraProperties(); - ext.set("replaceSignatureFiles", new Closure(t) { - @Override - public Void call(Object... names) { - List resources = new ArrayList<>(names.length); - for (Object name : names) { - resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); - } - t.setSignaturesFiles(project.files(resources)); - return null; - } - - }); - ext.set("addSignatureFiles", new Closure(t) { - @Override - public Void call(Object... 
names) { - List resources = new ArrayList<>(names.length); - for (Object name : names) { - resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); - } - t.setSignaturesFiles(t.getSignaturesFiles().plus(project.files(resources))); - return null; - } - }); - }); forbiddenTask.configure(t -> t.dependsOn(sourceSetTask)); }); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java index 0059913ad086d..559d7536c310a 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java @@ -52,7 +52,7 @@ public LoggerUsageTask(ObjectFactory objectFactory) { } @Inject - abstract public WorkerExecutor getWorkerExecutor(); + public abstract WorkerExecutor getWorkerExecutor(); @TaskAction public void runLoggerUsageTask() { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 1ff6e2f505436..c602a50c2adb8 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -58,6 +58,7 @@ */ public class RestTestBasePlugin implements Plugin { + private static final String TESTS_MAX_PARALLEL_FORKS_SYSPROP = "tests.max.parallel.forks"; private static final String TESTS_RUNTIME_JAVA_SYSPROP = "tests.runtime.java"; private static final String DEFAULT_DISTRIBUTION_SYSPROP = "tests.default.distribution"; private static final String INTEG_TEST_DISTRIBUTION_SYSPROP = "tests.integ-test.distribution"; @@ -123,6 +124,7 @@ public void apply(Project 
project) { // Enable parallel execution for these tests since each test gets its own cluster task.setMaxParallelForks(task.getProject().getGradle().getStartParameter().getMaxWorkerCount() / 2); + nonInputSystemProperties.systemProperty(TESTS_MAX_PARALLEL_FORKS_SYSPROP, () -> String.valueOf(task.getMaxParallelForks())); // Disable test failure reporting since this stuff is now captured in build scans task.getInputs().property(ElasticsearchTestBasePlugin.DUMP_OUTPUT_ON_FAILURE_PROP_NAME, false); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java index eee1c4c21eb08..76004e3e5f6db 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java @@ -18,6 +18,7 @@ import com.fasterxml.jackson.databind.node.TextNode; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.fasterxml.jackson.dataformat.yaml.YAMLParser; +import com.google.common.collect.Sets; import org.apache.commons.lang3.tuple.Pair; import org.elasticsearch.gradle.Version; @@ -44,6 +45,7 @@ import org.gradle.api.file.FileSystemOperations; import org.gradle.api.file.FileTree; import org.gradle.api.model.ObjectFactory; +import org.gradle.api.provider.ListProperty; import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; @@ -67,7 +69,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.stream.Collectors; import javax.inject.Inject; @@ -75,7 +76,7 @@ /** * A task to transform REST tests for use in REST API compatibility before they are executed. 
*/ -public class RestCompatTestTransformTask extends DefaultTask { +public abstract class RestCompatTestTransformTask extends DefaultTask { private static final YAMLFactory YAML_FACTORY = new YAMLFactory(); private static final ObjectMapper MAPPER = new ObjectMapper(YAML_FACTORY); @@ -90,30 +91,28 @@ public class RestCompatTestTransformTask extends DefaultTask { private final DirectoryProperty sourceDirectory; private final DirectoryProperty outputDirectory; private final PatternFilterable testPatternSet; - private final Factory patternSetFactory; - private final List> transformations = new ArrayList<>(); // PatternFilterable -> reason why skipped. private final Map skippedTestByFilePatternTransformations = new HashMap<>(); // PatternFilterable -> list of full test names and reasons. Needed for 1 pattern may include many tests and reasons private final Map>> skippedTestByTestNameTransformations = new HashMap<>(); @Inject - public RestCompatTestTransformTask( - FileSystemOperations fileSystemOperations, - Factory patternSetFactory, - ObjectFactory objectFactory - ) { - this.patternSetFactory = patternSetFactory; + protected Factory getPatternSetFactory() { + throw new UnsupportedOperationException(); + } + + @Inject + public RestCompatTestTransformTask(FileSystemOperations fileSystemOperations, ObjectFactory objectFactory) { this.fileSystemOperations = fileSystemOperations; this.compatibleVersion = Version.fromString(VersionProperties.getVersions().get("elasticsearch")).getMajor() - 1; this.sourceDirectory = objectFactory.directoryProperty(); this.outputDirectory = objectFactory.directoryProperty(); - this.testPatternSet = patternSetFactory.create(); + this.testPatternSet = getPatternSetFactory().create(); this.testPatternSet.include("/*" + "*/*.yml"); // concat these strings to keep build from thinking this is invalid javadoc // always inject compat headers headers.put("Content-Type", "application/vnd.elasticsearch+json;compatible-with=" + compatibleVersion); 
headers.put("Accept", "application/vnd.elasticsearch+json;compatible-with=" + compatibleVersion); - transformations.add(new InjectHeaders(headers, Set.of(RestCompatTestTransformTask::doesNotHaveCatOperation))); + getTransformations().add(new InjectHeaders(headers, Sets.newHashSet(RestCompatTestTransformTask::doesNotHaveCatOperation))); } private static boolean doesNotHaveCatOperation(ObjectNode doNodeValue) { @@ -143,7 +142,7 @@ public void skipTest(String fullTestName, String reason) { ); } - PatternSet skippedPatternSet = patternSetFactory.create(); + PatternSet skippedPatternSet = getPatternSetFactory().create(); // create file patterns for all a1/a2/a3/b.yml possibilities. for (int i = testParts.length - 1; i > 1; i--) { final String lastPart = testParts[i]; @@ -157,7 +156,7 @@ public void skipTest(String fullTestName, String reason) { } public void skipTestsByFilePattern(String filePattern, String reason) { - PatternSet skippedPatternSet = patternSetFactory.create(); + PatternSet skippedPatternSet = getPatternSetFactory().create(); skippedPatternSet.include(filePattern); skippedTestByFilePatternTransformations.put(skippedPatternSet, reason); } @@ -170,7 +169,7 @@ public void skipTestsByFilePattern(String filePattern, String reason) { * @param value the value used in the replacement. 
For example "bar" */ public void replaceValueInMatch(String subKey, Object value) { - transformations.add(new ReplaceValueInMatch(subKey, MAPPER.convertValue(value, JsonNode.class))); + getTransformations().add(new ReplaceValueInMatch(subKey, MAPPER.convertValue(value, JsonNode.class))); } /** @@ -181,7 +180,7 @@ public void replaceValueInMatch(String subKey, Object value) { * @param testName the testName to apply replacement */ public void replaceValueInMatch(String subKey, Object value, String testName) { - transformations.add(new ReplaceValueInMatch(subKey, MAPPER.convertValue(value, JsonNode.class), testName)); + getTransformations().add(new ReplaceValueInMatch(subKey, MAPPER.convertValue(value, JsonNode.class), testName)); } /** @@ -193,7 +192,7 @@ public void replaceValueInMatch(String subKey, Object value, String testName) { * @see ReplaceKeyInDo */ public void replaceKeyInDo(String oldKeyName, String newKeyName, String testName) { - transformations.add(new ReplaceKeyInDo(oldKeyName, newKeyName, testName)); + getTransformations().add(new ReplaceKeyInDo(oldKeyName, newKeyName, testName)); } /** @@ -204,7 +203,7 @@ public void replaceKeyInDo(String oldKeyName, String newKeyName, String testName * @see ReplaceKeyInDo */ public void replaceKeyInDo(String oldKeyName, String newKeyName) { - transformations.add(new ReplaceKeyInDo(oldKeyName, newKeyName, null)); + getTransformations().add(new ReplaceKeyInDo(oldKeyName, newKeyName, null)); } /** @@ -215,7 +214,7 @@ public void replaceKeyInDo(String oldKeyName, String newKeyName) { * @see ReplaceKeyInLength */ public void replaceKeyInLength(String oldKeyName, String newKeyName) { - transformations.add(new ReplaceKeyInLength(oldKeyName, newKeyName, null)); + getTransformations().add(new ReplaceKeyInLength(oldKeyName, newKeyName, null)); } /** @@ -226,7 +225,7 @@ public void replaceKeyInLength(String oldKeyName, String newKeyName) { * @param value the value used in the replacement. 
For example 99 */ public void replaceValueInLength(String subKey, int value) { - transformations.add(new ReplaceValueInLength(subKey, MAPPER.convertValue(value, NumericNode.class))); + getTransformations().add(new ReplaceValueInLength(subKey, MAPPER.convertValue(value, NumericNode.class))); } /** @@ -238,7 +237,7 @@ public void replaceValueInLength(String subKey, int value) { * @param testName the testName to apply replacement */ public void replaceValueInLength(String subKey, int value, String testName) { - transformations.add(new ReplaceValueInLength(subKey, MAPPER.convertValue(value, NumericNode.class), testName)); + getTransformations().add(new ReplaceValueInLength(subKey, MAPPER.convertValue(value, NumericNode.class), testName)); } /** @@ -249,7 +248,7 @@ public void replaceValueInLength(String subKey, int value, String testName) { * @see ReplaceKeyInMatch */ public void replaceKeyInMatch(String oldKeyName, String newKeyName) { - transformations.add(new ReplaceKeyInMatch(oldKeyName, newKeyName, null)); + getTransformations().add(new ReplaceKeyInMatch(oldKeyName, newKeyName, null)); } /** @@ -260,7 +259,7 @@ public void replaceKeyInMatch(String oldKeyName, String newKeyName) { * @param newValue the value used in the replacement */ public void replaceIsTrue(String oldValue, Object newValue) { - transformations.add(new ReplaceIsTrue(oldValue, MAPPER.convertValue(newValue, TextNode.class))); + getTransformations().add(new ReplaceIsTrue(oldValue, MAPPER.convertValue(newValue, TextNode.class))); } /** @@ -271,7 +270,7 @@ public void replaceIsTrue(String oldValue, Object newValue) { * @param newValue the value used in the replacement */ public void replaceIsFalse(String oldValue, Object newValue) { - transformations.add(new ReplaceIsFalse(oldValue, MAPPER.convertValue(newValue, TextNode.class))); + getTransformations().add(new ReplaceIsFalse(oldValue, MAPPER.convertValue(newValue, TextNode.class))); } /** @@ -283,7 +282,7 @@ public void replaceIsFalse(String 
oldValue, Object newValue) { * @param testName the testName to apply replacement */ public void replaceIsFalse(String oldValue, Object newValue, String testName) { - transformations.add(new ReplaceIsFalse(oldValue, MAPPER.convertValue(newValue, TextNode.class), testName)); + getTransformations().add(new ReplaceIsFalse(oldValue, MAPPER.convertValue(newValue, TextNode.class), testName)); } /** @@ -295,7 +294,7 @@ public void replaceIsFalse(String oldValue, Object newValue, String testName) { * @param newValue the value used in the replacement */ public void replaceValueTextByKeyValue(String key, String oldValue, Object newValue) { - transformations.add(new ReplaceTextual(key, oldValue, MAPPER.convertValue(newValue, TextNode.class))); + getTransformations().add(new ReplaceTextual(key, oldValue, MAPPER.convertValue(newValue, TextNode.class))); } /** @@ -308,7 +307,7 @@ public void replaceValueTextByKeyValue(String key, String oldValue, Object newVa * @param testName the testName to apply replacement */ public void replaceValueTextByKeyValue(String key, String oldValue, Object newValue, String testName) { - transformations.add(new ReplaceTextual(key, oldValue, MAPPER.convertValue(newValue, TextNode.class), testName)); + getTransformations().add(new ReplaceTextual(key, oldValue, MAPPER.convertValue(newValue, TextNode.class), testName)); } /** @@ -319,7 +318,7 @@ public void replaceValueTextByKeyValue(String key, String oldValue, Object newVa * @param subKey the key name directly under match to replace. 
For example "_type" */ public void removeMatch(String subKey) { - transformations.add(new RemoveMatch(subKey)); + getTransformations().add(new RemoveMatch(subKey)); } /** @@ -331,7 +330,7 @@ public void removeMatch(String subKey) { * @param testName the testName to apply removal */ public void removeMatch(String subKey, String testName) { - transformations.add(new RemoveMatch(subKey, testName)); + getTransformations().add(new RemoveMatch(subKey, testName)); } /** @@ -342,7 +341,7 @@ public void removeMatch(String subKey, String testName) { * @param testName the testName to apply addition */ public void addMatch(String subKey, Object value, String testName) { - transformations.add(new AddMatch(subKey, MAPPER.convertValue(value, JsonNode.class), testName)); + getTransformations().add(new AddMatch(subKey, MAPPER.convertValue(value, JsonNode.class), testName)); } /** @@ -352,7 +351,7 @@ public void addMatch(String subKey, Object value, String testName) { * @param warnings the warning(s) to add */ public void addWarning(String testName, String... warnings) { - transformations.add(new InjectWarnings(Arrays.asList(warnings), testName)); + getTransformations().add(new InjectWarnings(Arrays.asList(warnings), testName)); } /** @@ -362,7 +361,7 @@ public void addWarning(String testName, String... warnings) { * @param warningsRegex the regex warning(s) to add */ public void addWarningRegex(String testName, String... warningsRegex) { - transformations.add(new InjectWarnings(true, Arrays.asList(warningsRegex), testName)); + getTransformations().add(new InjectWarnings(true, Arrays.asList(warningsRegex), testName)); } /** @@ -371,7 +370,7 @@ public void addWarningRegex(String testName, String... warningsRegex) { * @param warnings the warning(s) to remove */ public void removeWarning(String... 
warnings) { - transformations.add(new RemoveWarnings(Set.copyOf(Arrays.asList(warnings)))); + getTransformations().add(new RemoveWarnings(Sets.newHashSet(warnings))); } /** @@ -381,7 +380,7 @@ public void removeWarning(String... warnings) { * @param testName the test name to remove the warning */ public void removeWarningForTest(String warnings, String testName) { - transformations.add(new RemoveWarnings(Set.copyOf(Arrays.asList(warnings)), testName)); + getTransformations().add(new RemoveWarnings(Sets.newHashSet(warnings), testName)); } /** @@ -390,7 +389,7 @@ public void removeWarningForTest(String warnings, String testName) { * @param allowedWarnings the warning(s) to add */ public void addAllowedWarning(String... allowedWarnings) { - transformations.add(new InjectAllowedWarnings(Arrays.asList(allowedWarnings))); + getTransformations().add(new InjectAllowedWarnings(Arrays.asList(allowedWarnings))); } /** @@ -399,7 +398,7 @@ public void addAllowedWarning(String... allowedWarnings) { * @param allowedWarningsRegex the regex warning(s) to add */ public void addAllowedWarningRegex(String... allowedWarningsRegex) { - transformations.add(new InjectAllowedWarnings(true, Arrays.asList(allowedWarningsRegex))); + getTransformations().add(new InjectAllowedWarnings(true, Arrays.asList(allowedWarningsRegex))); } /** @@ -409,7 +408,7 @@ public void addAllowedWarningRegex(String... 
allowedWarningsRegex) { * @testName the test name to add a allowedWarningRegex */ public void addAllowedWarningRegexForTest(String allowedWarningsRegex, String testName) { - transformations.add(new InjectAllowedWarnings(true, Arrays.asList(allowedWarningsRegex), testName)); + getTransformations().add(new InjectAllowedWarnings(true, Arrays.asList(allowedWarningsRegex), testName)); } @OutputDirectory @@ -463,10 +462,10 @@ public void transform() throws IOException { skippedFilesWithTestAndReason.get(file).forEach(fullTestNameAndReasonPair -> { String prefix = file.getName().replace(".yml", "/"); String singleTestName = fullTestNameAndReasonPair.getLeft().replaceAll(".*" + prefix, ""); - transformations.add(new Skip(singleTestName, fullTestNameAndReasonPair.getRight())); + getTransformations().add(new Skip(singleTestName, fullTestNameAndReasonPair.getRight())); }); } - transformRestTests = transformer.transformRestTests(new LinkedList<>(tests), transformations); + transformRestTests = transformer.transformRestTests(new LinkedList<>(tests), getTransformations().get()); } // convert to url to ensure forward slashes @@ -490,9 +489,7 @@ public DirectoryProperty getSourceDirectory() { } @Nested - public List> getTransformations() { - return transformations; - } + public abstract ListProperty> getTransformations(); @Input public String getSkippedTestByFilePatternTransformations() { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index bfc1b1e6be960..b0998957910a2 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -232,15 +232,7 @@ public class RestHighLevelClient implements Closeable { * {@link RestClient} to be used to perform requests. 
*/ public RestHighLevelClient(RestClientBuilder restClientBuilder) { - this(restClientBuilder, Collections.emptyList()); - } - - /** - * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the - * {@link RestClient} to be used to perform requests and parsers for custom response sections added to Elasticsearch through plugins. - */ - protected RestHighLevelClient(RestClientBuilder restClientBuilder, List namedXContentEntries) { - this(restClientBuilder.build(), RestClient::close, namedXContentEntries); + this(restClientBuilder.build(), RestClient::close, Collections.emptyList()); } /** @@ -265,7 +257,7 @@ protected RestHighLevelClient( * The consumer argument allows to control what needs to be done when the {@link #close()} method is called. * Also subclasses can provide parsers for custom response sections added to Elasticsearch through plugins. */ - protected RestHighLevelClient( + private RestHighLevelClient( RestClient restClient, CheckedConsumer doClose, List namedXContentEntries, @@ -309,17 +301,6 @@ public final void close() throws IOException { doClose.accept(client); } - /** - * Executes a bulk request using the Bulk API. - * See Bulk API on elastic.co - * @param bulkRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - */ - public final BulkResponse bulk(BulkRequest bulkRequest, RequestOptions options) throws IOException { - return performRequestAndParseEntity(bulkRequest, RequestConverters::bulk, options, BulkResponse::fromXContent, emptySet()); - } - /** * Asynchronously executes a bulk request using the Bulk API. * See Bulk API on elastic.co @@ -410,7 +391,7 @@ public final SearchResponse scroll(SearchScrollRequest searchScrollRequest, Requ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. 
*/ @Deprecated - protected final Resp performRequestAndParseEntity( + private Resp performRequestAndParseEntity( Req request, CheckedFunction requestConverter, RequestOptions options, @@ -425,7 +406,7 @@ protected final Resp performRequestAndParseEnt * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. */ @Deprecated - protected final Resp performRequest( + private Resp performRequest( Req request, CheckedFunction requestConverter, RequestOptions options, @@ -439,23 +420,6 @@ protected final Resp performRequest( return internalPerformRequest(request, requestConverter, options, responseConverter, ignores); } - /** - * Defines a helper method for performing a request. - */ - protected final Resp performRequest( - Req request, - CheckedFunction requestConverter, - RequestOptions options, - CheckedFunction responseConverter, - Set ignores - ) throws IOException { - Optional validationException = request.validate(); - if (validationException != null && validationException.isPresent()) { - throw validationException.get(); - } - return internalPerformRequest(request, requestConverter, options, responseConverter, ignores); - } - /** * Provides common functionality for performing a request. 
*/ @@ -499,7 +463,7 @@ private Resp internalPerformRequest( * @return Cancellable instance that may be used to cancel the request */ @Deprecated - protected final Cancellable performRequestAsyncAndParseEntity( + private Cancellable performRequestAsyncAndParseEntity( Req request, CheckedFunction requestConverter, RequestOptions options, @@ -523,7 +487,7 @@ protected final Cancellable performRequestAsyn * @return Cancellable instance that may be used to cancel the request */ @Deprecated - protected final Cancellable performRequestAsync( + private Cancellable performRequestAsync( Req request, CheckedFunction requestConverter, RequestOptions options, @@ -564,7 +528,7 @@ private Cancellable internalPerformRequestAsync( return performClientRequestAsync(req, responseListener); } - final ResponseListener wrapResponseListener( + private ResponseListener wrapResponseListener( CheckedFunction responseConverter, ActionListener actionListener, Set ignores @@ -611,7 +575,7 @@ public void onFailure(Exception exception) { * that wraps the original {@link ResponseException}. The potential exception obtained while parsing is added to the returned * exception as a suppressed exception. This method is guaranteed to not throw any exception eventually thrown while parsing. 
*/ - protected final ElasticsearchStatusException parseResponseException(ResponseException responseException) { + private ElasticsearchStatusException parseResponseException(ResponseException responseException) { Response response = responseException.getResponse(); HttpEntity entity = response.getEntity(); ElasticsearchStatusException elasticsearchException; @@ -631,7 +595,7 @@ protected final ElasticsearchStatusException parseResponseException(ResponseExce return elasticsearchException; } - protected final Resp parseEntity(final HttpEntity entity, final CheckedFunction entityParser) + private Resp parseEntity(final HttpEntity entity, final CheckedFunction entityParser) throws IOException { if (entity == null) { throw new IllegalStateException("Response body expected but not returned"); @@ -735,7 +699,7 @@ private Cancellable performClientRequestAsync(Request request, ResponseListener ListenableFuture> versionCheck = getVersionValidationFuture(); // Create a future that tracks cancellation of this method's result and forwards cancellation to the actual LLRC request. - CompletableFuture cancellationForwarder = new CompletableFuture(); + CompletableFuture cancellationForwarder = new CompletableFuture<>(); Cancellable result = new Cancellable() { @Override public void cancel() { @@ -754,7 +718,7 @@ void runIfNotCancelled(Runnable runnable) { // Send the request after we have done the version compatibility check. Note that if it has already happened, the listener will // be called immediately on the same thread with no asynchronous scheduling overhead. 
- versionCheck.addListener(new ActionListener>() { + versionCheck.addListener(new ActionListener<>() { @Override public void onResponse(Optional validation) { if (validation.isPresent() == false) { @@ -779,13 +743,13 @@ public void onFailure(Exception e) { }); return result; - }; + } /** * Go through all the request's existing headers, looking for {@code headerName} headers and if they exist, * changing them to use version compatibility. If no request headers are changed, modify the entity type header if appropriate */ - boolean addCompatibilityFor(RequestOptions.Builder newOptions, Header entityHeader, String headerName) { + private boolean addCompatibilityFor(RequestOptions.Builder newOptions, Header entityHeader, String headerName) { // Modify any existing "Content-Type" headers on the request to use the version compatibility, if available boolean contentTypeModified = false; for (Header header : new ArrayList<>(newOptions.getHeaders())) { @@ -807,7 +771,7 @@ boolean addCompatibilityFor(RequestOptions.Builder newOptions, Header entityHead * Modify the given header to be version compatible, if necessary. * Returns true if a modification was made, false otherwise. */ - boolean modifyHeader(RequestOptions.Builder newOptions, Header header, String headerName) { + private boolean modifyHeader(RequestOptions.Builder newOptions, Header header, String headerName) { for (EntityType type : EntityType.values()) { final String headerValue = header.getValue(); if (headerValue.startsWith(type.header())) { @@ -825,7 +789,7 @@ boolean modifyHeader(RequestOptions.Builder newOptions, Header header, String he * modifying the "Content-Type" and "Accept" headers if present, or modifying the header based * on the request's entity type. */ - void modifyRequestForCompatibility(Request request) { + private void modifyRequestForCompatibility(Request request) { final Header entityHeader = request.getEntity() == null ? 
null : request.getEntity().getContentType(); final RequestOptions.Builder newOptions = request.getOptions().toBuilder(); @@ -982,7 +946,7 @@ private Optional getVersionValidation(Response response) throws IOExcept return Optional.empty(); } - static List getDefaultNamedXContents() { + private static List getDefaultNamedXContents() { Map> map = new HashMap<>(); map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c)); map.put(InternalHDRPercentiles.NAME, (p, c) -> ParsedHDRPercentiles.fromXContent(p, (String) c)); @@ -1068,7 +1032,7 @@ static List getDefaultNamedXContents() { /** * Loads and returns the {@link NamedXContentRegistry.Entry} parsers provided by plugins. */ - static List getProvidedNamedXContents() { + private static List getProvidedNamedXContents() { List entries = new ArrayList<>(); for (NamedXContentProvider service : ServiceLoader.load(NamedXContentProvider.class)) { entries.addAll(service.getNamedXContentParsers()); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/AcknowledgedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/AcknowledgedResponse.java deleted file mode 100644 index 7adcee74cb206..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/AcknowledgedResponse.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.core; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; -import java.util.function.Function; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class AcknowledgedResponse { - - protected static final String PARSE_FIELD_NAME = "acknowledged"; - private static final ConstructingObjectParser PARSER = AcknowledgedResponse.generateParser( - "acknowledged_response", - AcknowledgedResponse::new, - AcknowledgedResponse.PARSE_FIELD_NAME - ); - - private final boolean acknowledged; - - public AcknowledgedResponse(final boolean acknowledged) { - this.acknowledged = acknowledged; - } - - public boolean isAcknowledged() { - return acknowledged; - } - - protected static ConstructingObjectParser generateParser(String name, Function ctor, String parseField) { - ConstructingObjectParser p = new ConstructingObjectParser<>(name, true, args -> ctor.apply((boolean) args[0])); - p.declareBoolean(constructorArg(), new ParseField(parseField)); - return p; - } - - public static AcknowledgedResponse fromXContent(final XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final AcknowledgedResponse that = (AcknowledgedResponse) o; - return isAcknowledged() == that.isAcknowledged(); - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged); - } - - /** - * @return the field name this response uses to output the acknowledged flag - */ - protected String getFieldName() { - return PARSE_FIELD_NAME; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/ShardsAcknowledgedResponse.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/ShardsAcknowledgedResponse.java deleted file mode 100644 index a80a6bb2a15b7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/ShardsAcknowledgedResponse.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.core; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class ShardsAcknowledgedResponse extends AcknowledgedResponse { - - protected static final String SHARDS_PARSE_FIELD_NAME = "shards_acknowledged"; - - private static ConstructingObjectParser buildParser() { - - ConstructingObjectParser p = new ConstructingObjectParser<>( - "freeze", - true, - args -> new ShardsAcknowledgedResponse((boolean) args[0], (boolean) args[1]) - ); - p.declareBoolean(constructorArg(), new ParseField(AcknowledgedResponse.PARSE_FIELD_NAME)); - p.declareBoolean(constructorArg(), new ParseField(SHARDS_PARSE_FIELD_NAME)); - return p; - } - - private static final ConstructingObjectParser PARSER = buildParser(); - - private final boolean shardsAcknowledged; - - public ShardsAcknowledgedResponse(boolean acknowledged, boolean shardsAcknowledged) { - super(acknowledged); - this.shardsAcknowledged = shardsAcknowledged; - } - - public boolean isShardsAcknowledged() { - return shardsAcknowledged; - } - - public static ShardsAcknowledgedResponse fromXContent(XContentParser parser) throws 
IOException { - return PARSER.parse(parser, null); - } -} diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 85d38b007e632..6006fae1c2d84 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.conventions.precommit.LicenseHeadersTask @@ -60,7 +60,7 @@ tasks.named("processResources").configure { ] } -tasks.withType(CheckForbiddenApis).configureEach { +tasks.withType(CheckForbiddenApisTask).configureEach { //client does not depend on server, so only jdk and http signatures should be checked replaceSignatureFiles('jdk-signatures', 'http-signatures') } @@ -71,8 +71,11 @@ tasks.named("forbiddenPatterns").configure { tasks.named('forbiddenApisTest').configure { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { signatures -> + signatures -= 'jdk-non-portable' + signatures += 'jdk-internal' + signatures + } } // JarHell is part of es server, which we don't want to pull in diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index 546e81445bb89..901917c7b25f8 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -57,8 +57,12 @@ tasks.named('forbiddenApisMain').configure { tasks.named('forbiddenApisTest').configure { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' + 
bundledSignatures + } + //client does not depend on server, so only jdk signatures should be checked replaceSignatureFiles 'jdk-signatures' } diff --git a/client/test/build.gradle b/client/test/build.gradle index 18eb16883ab15..9ee222b036cd1 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -40,8 +40,11 @@ tasks.named('forbiddenApisMain').configure { tasks.named('forbiddenApisTest').configure { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' + bundledSignatures + } //client does not depend on core, so only jdk signatures should be checked replaceSignatureFiles 'jdk-signatures' } diff --git a/distribution/tools/server-cli/build.gradle b/distribution/tools/server-cli/build.gradle index 3ab5e6e86f5ba..623f9d40cd49e 100644 --- a/distribution/tools/server-cli/build.gradle +++ b/distribution/tools/server-cli/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask apply plugin: 'elasticsearch.build' @@ -20,7 +20,7 @@ tasks.named("test").configure { systemProperty "tests.security.manager", "false" } -tasks.withType(CheckForbiddenApis).configureEach { +tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/docs/changelog/101148.yaml b/docs/changelog/101148.yaml new file mode 100644 index 0000000000000..eabe288e69e88 --- /dev/null +++ b/docs/changelog/101148.yaml @@ -0,0 +1,6 @@ +pr: 101148 +summary: Add support for marking component templates as deprecated +area: Indices APIs +type: enhancement +issues: + - 100992 diff --git a/docs/changelog/101426.yaml b/docs/changelog/101426.yaml new file mode 100644 index 0000000000000..f9053ba1c1ec1 --- /dev/null +++ b/docs/changelog/101426.yaml @@ -0,0 +1,5 @@ +pr: 101426 +summary: Add undesired shard count +area: Allocation +type: enhancement +issues: [] diff --git a/docs/changelog/101629.yaml b/docs/changelog/101629.yaml new file mode 100644 index 0000000000000..1b8691c9798ff --- /dev/null +++ b/docs/changelog/101629.yaml @@ -0,0 +1,5 @@ +pr: 101629 +summary: Health report infrastructure doesn't trip the circuit breakers +area: Health +type: bug +issues: [] diff --git a/docs/changelog/101648.yaml b/docs/changelog/101648.yaml new file mode 100644 index 0000000000000..48e01739aabc0 --- /dev/null +++ b/docs/changelog/101648.yaml @@ -0,0 +1,6 @@ +pr: 101648 +summary: "ESQL: Fix unreleased block in topn" +area: ES|QL +type: bug +issues: + - 101588 diff --git a/docs/changelog/101652.yaml b/docs/changelog/101652.yaml new file mode 100644 index 0000000000000..79e3167696aee --- /dev/null +++ b/docs/changelog/101652.yaml @@ -0,0 +1,5 @@ +pr: 101652 +summary: Fix race condition in `SnapshotsService` +area: Snapshot/Restore +type: bug +issues: [] diff --git a/docs/changelog/101713.yaml 
b/docs/changelog/101713.yaml new file mode 100644 index 0000000000000..c3addf9296584 --- /dev/null +++ b/docs/changelog/101713.yaml @@ -0,0 +1,5 @@ +pr: 101713 +summary: Disable `weight_matches` when kNN query is present +area: Highlighting +type: bug +issues: [] diff --git a/docs/changelog/101727.yaml b/docs/changelog/101727.yaml new file mode 100644 index 0000000000000..24a7e1d5b4e48 --- /dev/null +++ b/docs/changelog/101727.yaml @@ -0,0 +1,5 @@ +pr: 101727 +summary: Fix listeners in `SharedBlobCacheService.readMultiRegions` +area: Distributed +type: bug +issues: [] diff --git a/docs/changelog/98916.yaml b/docs/changelog/98916.yaml new file mode 100644 index 0000000000000..a466e3deba009 --- /dev/null +++ b/docs/changelog/98916.yaml @@ -0,0 +1,5 @@ +pr: 98916 +summary: Make knn search a query +area: Vector Search +type: feature +issues: [] diff --git a/docs/painless/painless-contexts/painless-reindex-context.asciidoc b/docs/painless/painless-contexts/painless-reindex-context.asciidoc index 13b216bac6345..9aae1ae70c5ac 100644 --- a/docs/painless/painless-contexts/painless-reindex-context.asciidoc +++ b/docs/painless/painless-contexts/painless-reindex-context.asciidoc @@ -19,7 +19,7 @@ reindexed into a target index. {ref}/mapping-index-field.html[`ctx['_index']`] (`String`):: The name of the index. -{ref}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only):: +{ref}/mapping-id-field.html[`ctx['_id']`] (`String`):: The unique document id. `ctx['_version']` (`int`):: diff --git a/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc b/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc index d8f9d4d7bae70..78a8b8d36d6bb 100644 --- a/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc +++ b/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc @@ -20,7 +20,7 @@ result of query. {ref}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only):: The name of the index. 
-{ref}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only):: +{ref}/mapping-id-field.html[`ctx['_id']`] (`String`, read-only):: The unique document id. `ctx['_version']` (`int`, read-only):: diff --git a/docs/painless/painless-contexts/painless-update-context.asciidoc b/docs/painless/painless-contexts/painless-update-context.asciidoc index f9ae3434827d9..53b1008cfebff 100644 --- a/docs/painless/painless-contexts/painless-update-context.asciidoc +++ b/docs/painless/painless-contexts/painless-update-context.asciidoc @@ -18,7 +18,7 @@ add, modify, or delete fields within a single document. {ref}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only):: The name of the index. -{ref}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only):: +{ref}/mapping-id-field.html[`ctx['_id']`] (`String`, read-only):: The unique document id. `ctx['_version']` (`int`, read-only):: diff --git a/docs/reference/cluster/get-desired-balance.asciidoc b/docs/reference/cluster/get-desired-balance.asciidoc index bd99f1d737bd8..2628b5abca9f3 100644 --- a/docs/reference/cluster/get-desired-balance.asciidoc +++ b/docs/reference/cluster/get-desired-balance.asciidoc @@ -6,7 +6,12 @@ NOTE: {cloud-only} -Exposes the desired balance and basic metrics. 
+Exposes: +* the desired balance computation and reconciliation stats +* balancing stats such as distribution of shards, disk and ingest forecasts + across nodes and data tiers (based on the current cluster state) +* routing table with each shard current and desired location +* cluster info with nodes disk usages [[get-desired-balance-request]] ==== {api-request-title} @@ -33,6 +38,8 @@ The API returns the following result: "reconciliation_time_in_millis": 0 }, "cluster_balance_stats" : { + "shard_count": 37, + "undesired_shard_allocation_count": 0, "tiers": { "data_hot" : { "shard_count" : { @@ -42,6 +49,13 @@ The API returns the following result: "average" : 2.3333333333333335, "std_dev" : 0.4714045207910317 }, + "undesired_shard_allocation_count" : { + "total" : 0.0, + "min" : 0.0, + "max" : 0.0, + "average" : 0.0, + "std_dev" : 0.0 + }, "forecast_write_load" : { "total" : 21.0, "min" : 6.0, @@ -72,6 +86,13 @@ The API returns the following result: "average" : 1.0, "std_dev" : 0.0 }, + "undesired_shard_allocation_count" : { + "total" : 0.0, + "min" : 0.0, + "max" : 0.0, + "average" : 0.0, + "std_dev" : 0.0 + }, "forecast_write_load" : { "total" : 0.0, "min" : 0.0, @@ -100,6 +121,7 @@ The API returns the following result: "node_id": "UPYt8VwWTt-IADAEbqpLxA", "roles": ["data_content"], "shard_count": 10, + "undesired_shard_allocation_count": 0, "forecast_write_load": 8.5, "forecast_disk_usage_bytes": 498435, "actual_disk_usage_bytes": 498435 @@ -108,6 +130,7 @@ The API returns the following result: "node_id": "bgC66tboTIeFQ0VgRGI4Gg", "roles": ["data_content"], "shard_count": 15, + "undesired_shard_allocation_count": 0, "forecast_write_load": 3.25, "forecast_disk_usage_bytes": 384935, "actual_disk_usage_bytes": 384935 @@ -116,6 +139,7 @@ The API returns the following result: "node_id": "2x1VTuSOQdeguXPdN73yRw", "roles": ["data_content"], "shard_count": 12, + "undesired_shard_allocation_count": 0, "forecast_write_load": 6.0, "forecast_disk_usage_bytes": 648766, 
"actual_disk_usage_bytes": 648766 diff --git a/docs/reference/eql/syntax.asciidoc b/docs/reference/eql/syntax.asciidoc index f592610f487c9..33a6fb745ac54 100644 --- a/docs/reference/eql/syntax.asciidoc +++ b/docs/reference/eql/syntax.asciidoc @@ -243,7 +243,7 @@ my_field like ("Value-*", "VALUE2", "VAL?") // case-sensitive my_field like~ ("value-*", "value2", "val?") // case-insensitive my_field regex ("[vV]alue-[0-9]", "VALUE[^2].?", "VAL3") // case-sensitive -my_field regex~ ("value-[0-9]", "value[^2].?", "val3") // case-sensitive +my_field regex~ ("value-[0-9]", "value[^2].?", "val3") // case-insensitive ---- `in` (case-sensitive):: diff --git a/docs/reference/indices/put-component-template.asciidoc b/docs/reference/indices/put-component-template.asciidoc index 794f01cb7f3ae..faf7e67039de7 100644 --- a/docs/reference/indices/put-component-template.asciidoc +++ b/docs/reference/indices/put-component-template.asciidoc @@ -169,6 +169,12 @@ created. Optional user metadata about the component template. May have any contents. This map is not automatically generated by {es}. +`deprecated`:: +(Optional, boolean) +Marks this component template as deprecated. +When a deprecated component template is referenced when creating or updating a non-deprecated index template, +{es} will emit a deprecation warning. + [[put-component-template-api-example]] ==== {api-examples-title} diff --git a/docs/reference/indices/put-index-template.asciidoc b/docs/reference/indices/put-index-template.asciidoc index 4dfd7252a9fa5..b9460bda86a09 100644 --- a/docs/reference/indices/put-index-template.asciidoc +++ b/docs/reference/indices/put-index-template.asciidoc @@ -167,6 +167,12 @@ include::{docdir}/rest-api/common-parms.asciidoc[tag=settings] (Optional, integer) Version number used to manage index templates externally. This number is not automatically generated by {es}. + +`deprecated`:: +(Optional, boolean) +Marks this index template as deprecated. 
+When creating or updating a non-deprecated index template that uses deprecated components, +{es} will emit a deprecation warning. // end::index-template-api-body[] [[put-index-template-api-example]] diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 874bfa64d3551..c9c3e16458618 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -2,6 +2,8 @@ [[delete-inference-api]] === Delete {infer} API +experimental[] + Deletes an {infer} model deployment. diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index bbf1d59c56213..b81f2663ec9e1 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -2,6 +2,8 @@ [[get-inference-api]] === Get {infer} API +experimental[] + Retrieves {infer} model information. [discrete] diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index ec1f01bc4d093..0476ac57287d9 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -2,6 +2,8 @@ [[inference-apis]] == {infer-cap} APIs +experimental[] + You can use the following APIs to manage {infer} models and perform {infer}: * <> diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 99dd4a059519f..f26a73d093091 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -2,6 +2,8 @@ [[post-inference-api]] === Perform inference API +experimental[] + Performs an inference task on an input text by using an {infer} model. 
diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index f4737875971c7..3b8cd19aded53 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -2,6 +2,8 @@ [[put-inference-api]] === Create {infer} API +experimental[] + Creates a model to perform an {infer} task. diff --git a/docs/reference/ingest/apis/put-pipeline.asciidoc b/docs/reference/ingest/apis/put-pipeline.asciidoc index 97c6a176dc256..ab1139b999952 100644 --- a/docs/reference/ingest/apis/put-pipeline.asciidoc +++ b/docs/reference/ingest/apis/put-pipeline.asciidoc @@ -94,6 +94,12 @@ how the version attribute is used. (Optional, object) Optional metadata about the ingest pipeline. May have any contents. This map is not automatically generated by {es}. + +`deprecated`:: +(Optional, boolean) +Marks this ingest pipeline as deprecated. +When a deprecated ingest pipeline is referenced as the default or final pipeline when creating or updating a non-deprecated index template, +{es} will emit a deprecation warning. // end::pipeline-object[] [[put-pipeline-api-example]] diff --git a/docs/reference/query-dsl/knn-query.asciidoc b/docs/reference/query-dsl/knn-query.asciidoc new file mode 100644 index 0000000000000..f9cc31748ef71 --- /dev/null +++ b/docs/reference/query-dsl/knn-query.asciidoc @@ -0,0 +1,222 @@ +[[query-dsl-knn-query]] +=== Knn query +++++ +Knn +++++ + +Finds the _k_ nearest vectors to a query vector, as measured by a similarity +metric. _knn_ query finds nearest vectors through approximate search on indexed +dense_vectors. The preferred way to do approximate kNN search is through the +<> of a search request. _knn_ query is reserved for +expert cases, where there is a need to combine this query with other queries. 
+ +[[knn-query-ex-request]] +==== Example request + +[source,console] +---- +PUT my-image-index +{ + "mappings": { + "properties": { + "image-vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "similarity": "l2_norm" + }, + "file-type": { + "type": "keyword" + } + } + } +} +---- + +. Index your data. ++ +[source,console] +---- +POST my-image-index/_bulk?refresh=true +{ "index": { "_id": "1" } } +{ "image-vector": [1, 5, -20], "file-type": "jpg" } +{ "index": { "_id": "2" } } +{ "image-vector": [42, 8, -15], "file-type": "png" } +{ "index": { "_id": "3" } } +{ "image-vector": [15, 11, 23], "file-type": "jpg" } +---- +//TEST[continued] + +. Run the search using the `knn` query, asking for the top 3 nearest vectors. ++ +[source,console] +---- +POST my-image-index/_search +{ + "size" : 3, + "query" : { + "knn": { + "field": "image-vector", + "query_vector": [-5, 9, -12], + "num_candidates": 10 + } + } +} +---- +//TEST[continued] + +NOTE: `knn` query doesn't have a separate `k` parameter. `k` is defined by +`size` parameter of a search request similar to other queries. `knn` query +collects `num_candidates` results from each shard, then merges them to get +the top `size` results. + + +[[knn-query-top-level-parameters]] +==== Top-level parameters for `knn` + +`field`:: ++ +-- +(Required, string) The name of the vector field to search against. Must be a +<>. +-- + +`query_vector`:: ++ +-- +(Required, array of floats) Query vector. Must have the same number of dimensions +as the vector field you are searching against. +-- + +`num_candidates`:: ++ +-- +(Required, integer) The number of nearest neighbor candidates to consider per shard. +Cannot exceed 10,000. {es} collects `num_candidates` results from each shard, then +merges them to find the top results. Increasing `num_candidates` tends to improve the +accuracy of the final results. +-- + +`filter`:: ++ +-- +(Optional, query object) Query to filter the documents that can match. 
+The kNN search will return the top documents that also match this filter. +The value can be a single query or a list of queries. If `filter` is not provided, +all documents are allowed to match. + +The filter is a pre-filter, meaning that it is applied **during** the approximate +kNN search to ensure that `num_candidates` matching documents are returned. +-- + +`similarity`:: ++ +-- +(Optional, float) The minimum similarity required for a document to be considered +a match. The similarity value calculated relates to the raw +<> used. Not the document score. The matched +documents are then scored according to <> +and the provided `boost` is applied. +-- + +`boost`:: ++ +-- +(Optional, float) Floating point number used to multiply the +scores of matched documents. This value cannot be negative. Defaults to `1.0`. +-- + +`_name`:: ++ +-- +(Optional, string) Name field to identify the query +-- + +[[knn-query-filtering]] +==== Pre-filters and post-filters in knn query + +There are two ways to filter documents that match a kNN query: + +. **pre-filtering** – filter is applied during the approximate kNN search +to ensure that `k` matching documents are returned. +. **post-filtering** – filter is applied after the approximate kNN search +completes, which results in fewer than k results, even when there are enough +matching documents. + +Pre-filtering is supported through the `filter` parameter of the `knn` query. +Also filters from <> are applied as pre-filters. + +All other filters found in the Query DSL tree are applied as post-filters. +For example, `knn` query finds the top 3 documents with the nearest vectors +(num_candidates=3), which are combined with `term` filter, that is +post-filtered. The final set of documents will contain only a single document +that passes the post-filter. 
+ + +[source,console] +---- +POST my-image-index/_search +{ + "size" : 10, + "query" : { + "bool" : { + "must" : { + "knn": { + "field": "image-vector", + "query_vector": [-5, 9, -12], + "num_candidates": 3 + } + }, + "filter" : { + "term" : { "file-type" : "png" } + } + } + } +} +---- +//TEST[continued] + +[[knn-query-with-nested-query]] +==== Knn query inside a nested query + +`knn` query can be used inside a nested query. The behaviour here is similar +to <>: + +* kNN search over nested dense_vectors diversifies the top results over +the top-level document +* `filter` over the top-level document metadata is supported and acts as a +post-filter +* `filter` over `nested` field metadata is not supported + +A sample query can look like below: + +[source,js] +---- +{ + "query" : { + "nested" : { + "path" : "paragraph", + "query" : { + "knn": { + "query_vector": [ + 0.45, + 45 + ], + "field": "paragraph.vector", + "num_candidates": 2 + } + } + } + } +} +---- +// NOTCONSOLE + +[[knn-query-aggregations]] +==== Knn query with aggregations +`knn` query calculates aggregations on `num_candidates` from each shard. +Thus, the final results from aggregations contain +`num_candidates * number_of_shards` documents. This is different from +the <> where aggregations are +calculated on the global top k nearest documents. + diff --git a/docs/reference/query-dsl/special-queries.asciidoc b/docs/reference/query-dsl/special-queries.asciidoc index a6d35d4f9b707..d46377f698359 100644 --- a/docs/reference/query-dsl/special-queries.asciidoc +++ b/docs/reference/query-dsl/special-queries.asciidoc @@ -17,6 +17,10 @@ or collection of documents. This query finds queries that are stored as documents that match with the specified document. +<>:: +A query that finds the _k_ nearest vectors to a query +vector, as measured by a similarity metric. + <>:: A query that computes scores based on the values of numeric features and is able to efficiently skip non-competitive hits. 
@@ -43,6 +47,8 @@ include::mlt-query.asciidoc[] include::percolate-query.asciidoc[] +include::knn-query.asciidoc[] + include::rank-feature-query.asciidoc[] include::script-query.asciidoc[] diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index 8c676a5515ca3..4bf1ceabe08d8 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -43,7 +43,7 @@ based on a similarity metric, the better its match. {es} supports two methods for kNN search: * <> using the `knn` search -option +option or `knn` query * <> using a `script_score` query with a vector function @@ -129,7 +129,8 @@ POST image-index/_bulk?refresh=true //TEST[continued] //TEST[s/\.\.\.//] -. Run the search using the <>. +. Run the search using the <> or the +<> (expert case). + [source,console] ---- diff --git a/docs/reference/settings/data-stream-lifecycle-settings.asciidoc b/docs/reference/settings/data-stream-lifecycle-settings.asciidoc index 8c3f4c793e5e0..023a8fcf860eb 100644 --- a/docs/reference/settings/data-stream-lifecycle-settings.asciidoc +++ b/docs/reference/settings/data-stream-lifecycle-settings.asciidoc @@ -51,6 +51,17 @@ segment size is a way to prevent indices from having a long tail of very small s This setting controls what value does <> configures on the target index. It defaults to `100MB`. +[[data-streams-lifecycle-signalling-error-retry-interval]] +`data_streams.lifecycle.signalling.error_retry_interval`:: +(<>, integer) +Represents the number of retries data stream lifecycle has to perform for an index +in an error step in order to signal that the index is not progressing (i.e. it's +stuck in an error step). +The current signalling mechanism is a log statement at the `error` level however, +the signalling mechanism can be extended in the future. +Defaults to 10 retries. 
+ + ==== Index level settings The following index-level settings are typically configured on the backing indices of a data stream. diff --git a/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java b/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java index 61d42e5db7083..5ec0d129b8f95 100644 --- a/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java +++ b/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java @@ -29,9 +29,8 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -public class SslConfigurationLoaderTests extends ESTestCase { +public final class SslConfigurationLoaderTests extends ESTestCase { - @SuppressWarnings("this-escape") private final Path certRoot = getDataPath("/certs/ca1/ca.crt").getParent().getParent(); private Settings settings; diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java index fe9b1f673f715..37bdf37ce51a5 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java @@ -26,7 +26,7 @@ /** * A single centroid which represents a number of data points. 
*/ -public class Centroid implements Comparable { +public final class Centroid implements Comparable { private static final AtomicInteger uniqueCount = new AtomicInteger(1); private double centroid = 0; @@ -40,19 +40,16 @@ private Centroid() { id = uniqueCount.getAndIncrement(); } - @SuppressWarnings("this-escape") public Centroid(double x) { this(); start(x, 1, uniqueCount.getAndIncrement()); } - @SuppressWarnings("this-escape") public Centroid(double x, long w) { this(); start(x, w, uniqueCount.getAndIncrement()); } - @SuppressWarnings("this-escape") public Centroid(double x, long w, int id) { this(); start(x, w, id); diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java index 57649f7e3dfa6..07bbc5c55f7cd 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java @@ -52,7 +52,7 @@ public class APMMeterRegistry implements MeterRegistry { private final Registrar longGauges = new Registrar<>(); private final Registrar longHistograms = new Registrar<>(); - private final Meter meter; + private Meter meter; public APMMeterRegistry(Meter meter) { this.meter = meter; @@ -170,8 +170,9 @@ public LongHistogram getLongHistogram(String name) { public void setProvider(Meter meter) { try (ReleasableLock lock = registerLock.acquire()) { + this.meter = meter; for (Registrar registrar : registrars) { - registrar.setProvider(meter); + registrar.setProvider(this.meter); } } } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java index 61b53f2087f6e..2a806ca19a4e0 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java +++ 
b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java @@ -25,6 +25,7 @@ * @param delegated instrument */ public abstract class AbstractInstrument implements Instrument { + private static final int MAX_NAME_LENGTH = 63; // TODO(stu): change to 255 when we upgrade to otel 1.30+, see #101679 private final AtomicReference delegate; private final String name; private final String description; @@ -33,6 +34,11 @@ public abstract class AbstractInstrument implements Instrument { @SuppressWarnings("this-escape") public AbstractInstrument(Meter meter, String name, String description, String unit) { this.name = Objects.requireNonNull(name); + if (name.length() > MAX_NAME_LENGTH) { + throw new IllegalArgumentException( + "Instrument name [" + name + "] with length [" + name.length() + "] exceeds maximum length [" + MAX_NAME_LENGTH + "]" + ); + } this.description = Objects.requireNonNull(description); this.unit = Objects.requireNonNull(unit); this.delegate = new AtomicReference<>(doBuildInstrument(meter)); diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java index 38fb0f0e0a8ac..b393edd6e58e3 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java @@ -16,15 +16,20 @@ import org.elasticsearch.telemetry.apm.internal.APMMeterService; import org.elasticsearch.telemetry.apm.internal.TestAPMMeterService; import org.elasticsearch.telemetry.metric.DoubleCounter; +import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.sameInstance; public class APMMeterRegistryTests extends ESTestCase { - Meter testOtel = 
OpenTelemetry.noop().getMeter("test"); + Meter testOtel = new RecordingOtelMeter(); Meter noopOtel = OpenTelemetry.noop().getMeter("noop"); + private Settings TELEMETRY_ENABLED = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + public void testMeterIsSetUponConstruction() { // test default APMMeterService apmMeter = new APMMeterService(Settings.EMPTY, () -> testOtel, () -> noopOtel); @@ -33,14 +38,13 @@ public void testMeterIsSetUponConstruction() { assertThat(meter, sameInstance(noopOtel)); // test explicitly enabled - var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); - apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel); + apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel); meter = apmMeter.getMeterRegistry().getMeter(); assertThat(meter, sameInstance(testOtel)); // test explicitly disabled - settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build(); apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel); meter = apmMeter.getMeterRegistry().getMeter(); @@ -60,9 +64,7 @@ public void testMeterIsOverridden() { } public void testLookupByName() { - var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); - - var apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel).getMeterRegistry(); + var apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel).getMeterRegistry(); DoubleCounter registeredCounter = apmMeter.registerDoubleCounter("name", "desc", "unit"); DoubleCounter lookedUpCounter = apmMeter.getDoubleCounter("name"); @@ -71,8 +73,7 @@ public void testLookupByName() { } public void testNoopIsSetOnStop() { - var settings 
= Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); - APMMeterService apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel); + APMMeterService apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel); apmMeter.start(); Meter meter = apmMeter.getMeterRegistry().getMeter(); @@ -84,4 +85,16 @@ public void testNoopIsSetOnStop() { assertThat(meter, sameInstance(noopOtel)); } + public void testMaxNameLength() { + APMMeterService apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel); + apmMeter.start(); + int max_length = 63; + var counter = apmMeter.getMeterRegistry().registerLongCounter("a".repeat(max_length), "desc", "count"); + assertThat(counter, instanceOf(LongCounter.class)); + IllegalArgumentException iae = expectThrows( + IllegalArgumentException.class, + () -> apmMeter.getMeterRegistry().registerLongCounter("a".repeat(max_length + 1), "desc", "count") + ); + assertThat(iae.getMessage(), containsString("exceeds maximum length [63]")); + } } diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java index f18d39fb39c6c..d1c74681c2bd7 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java @@ -90,6 +90,7 @@ public ObservableLongCounter buildWithCallback(Consumer contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java index bc0a5f87e25d3..4e30d87b6a174 100644 --- 
a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java @@ -21,8 +21,7 @@ import java.util.List; import java.util.Map; -public class RestSearchTemplateActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestSearchTemplateActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java index 9e5ca7a3cdc05..0430fe3404f91 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; import org.elasticsearch.index.query.QueryBuilders; @@ -27,6 +26,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -66,16 +66,19 @@ public void testHighlightingWithMatchOnlyTextFieldMatchPhrase() throws IOExcepti BulkResponse bulkItemResponses = bulk.get(); 
assertNoFailures(bulkItemResponses); - SearchResponse searchResponse = prepareSearch("test").setQuery( - QueryBuilders.matchPhraseQuery("message", "marking and sending shard") - ).setSize(500).highlighter(new HighlightBuilder().field("message")).get(); - assertNoFailures(searchResponse); - for (SearchHit searchHit : searchResponse.getHits()) { - assertThat( - searchHit.getHighlightFields().get("message").fragments()[0].string(), - containsString("marking and sending shard") - ); - } + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("message", "marking and sending shard")) + .setSize(500) + .highlighter(new HighlightBuilder().field("message")), + searchResponse -> { + for (SearchHit searchHit : searchResponse.getHits()) { + assertThat( + searchHit.getHighlightFields().get("message").fragments()[0].string(), + containsString("marking and sending shard") + ); + } + } + ); } public void testHighlightingWithMatchOnlyTextFieldSyntheticSource() throws IOException { @@ -112,16 +115,19 @@ public void testHighlightingWithMatchOnlyTextFieldSyntheticSource() throws IOExc BulkResponse bulkItemResponses = bulk.get(); assertNoFailures(bulkItemResponses); - SearchResponse searchResponse = prepareSearch("test").setQuery( - QueryBuilders.matchPhraseQuery("message", "marking and sending shard") - ).setSize(500).highlighter(new HighlightBuilder().field("message")).get(); - assertNoFailures(searchResponse); - for (SearchHit searchHit : searchResponse.getHits()) { - assertThat( - searchHit.getHighlightFields().get("message").fragments()[0].string(), - containsString("marking and sending shard") - ); - } + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("message", "marking and sending shard")) + .setSize(500) + .highlighter(new HighlightBuilder().field("message")), + searchResponse -> { + for (SearchHit searchHit : searchResponse.getHits()) { + assertThat( + 
searchHit.getHighlightFields().get("message").fragments()[0].string(), + containsString("marking and sending shard") + ); + } + } + ); } } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java index 87699f285063f..3a7f9a1ca6eb5 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; import org.elasticsearch.index.query.QueryBuilders; @@ -22,6 +21,7 @@ import java.util.Collection; import java.util.Map; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -39,39 +39,51 @@ protected Collection> nodePlugins() { public void testRankFeaturesTermQuery() throws IOException { init(); - SearchResponse response = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)).get(); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); - for (SearchHit hit : response.getHits().getHits()) { - assertThat(hit.getScore(), equalTo(20f)); - } - - response = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f)).get(); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); - for (SearchHit hit : response.getHits().getHits()) { - 
assertThat(hit.getScore(), equalTo(2000f)); - } - - response = prepareSearch(INDEX_NAME).setQuery( - QueryBuilders.boolQuery() - .should(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)) - .should(QueryBuilders.termQuery(FIELD_NAME, LOWER_RANKED_FEATURE).boost(3f)) - .minimumShouldMatch(1) - ).get(); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); - for (SearchHit hit : response.getHits().getHits()) { - if (hit.getId().equals("all")) { - assertThat(hit.getScore(), equalTo(50f)); + assertNoFailuresAndResponse( + prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertThat(hit.getScore(), equalTo(20f)); + } } - if (hit.getId().equals("lower")) { - assertThat(hit.getScore(), equalTo(30f)); + ); + assertNoFailuresAndResponse( + prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f)), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertThat(hit.getScore(), equalTo(2000f)); + } } - if (hit.getId().equals("higher")) { - assertThat(hit.getScore(), equalTo(20f)); - } - } + ); - response = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")).get(); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertNoFailuresAndResponse( + prepareSearch(INDEX_NAME).setQuery( + QueryBuilders.boolQuery() + .should(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)) + .should(QueryBuilders.termQuery(FIELD_NAME, LOWER_RANKED_FEATURE).boost(3f)) + .minimumShouldMatch(1) + ), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + for (SearchHit hit : searchResponse.getHits().getHits()) { + if 
(hit.getId().equals("all")) { + assertThat(hit.getScore(), equalTo(50f)); + } + if (hit.getId().equals("lower")) { + assertThat(hit.getScore(), equalTo(30f)); + } + if (hit.getId().equals("higher")) { + assertThat(hit.getScore(), equalTo(20f)); + } + } + } + ); + assertNoFailuresAndResponse( + prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")), + response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + ); } private void init() throws IOException { diff --git a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 9362080c9cb33..cad976411b8da 100644 --- a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -9,6 +9,7 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; @@ -22,10 +23,12 @@ import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.Operator; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentBuilder; import 
org.elasticsearch.xcontent.XContentFactory; @@ -1295,4 +1298,34 @@ public void testWithWildcardFieldNames() throws Exception { ).get(); assertEquals(1, response.getHits().getTotalHits().value); } + + public void testKnnQueryNotSupportedInPercolator() throws IOException { + String mappings = org.elasticsearch.common.Strings.format(""" + { + "properties": { + "my_query" : { + "type" : "percolator" + }, + "my_vector" : { + "type" : "dense_vector", + "dims" : 5, + "index" : true, + "similarity" : "l2_norm" + } + + } + } + """); + indicesAdmin().prepareCreate("index1").setMapping(mappings).get(); + ensureGreen(); + QueryBuilder knnVectorQueryBuilder = new KnnVectorQueryBuilder("my_vector", new float[] { 1, 1, 1, 1, 1 }, 10, null); + + IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index1") + .setId("knn_query1") + .setSource(jsonBuilder().startObject().field("my_query", knnVectorQueryBuilder).endObject()); + + DocumentParsingException exception = expectThrows(DocumentParsingException.class, () -> indexRequestBuilder.get()); + assertThat(exception.getMessage(), containsString("the [knn] query is unsupported inside a percolator")); + } + } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index c00eaa894dd69..e212264287937 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -61,6 +61,7 @@ import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.xcontent.XContentParser; import java.io.ByteArrayOutputStream; @@ -438,6 +439,8 @@ static QueryBuilder 
parseQueryBuilder(DocumentParserContext context) { throw new IllegalArgumentException("the [has_child] query is unsupported inside a percolator query"); } else if (queryName.equals("has_parent")) { throw new IllegalArgumentException("the [has_parent] query is unsupported inside a percolator query"); + } else if (queryName.equals(KnnVectorQueryBuilder.NAME)) { + throw new IllegalArgumentException("the [knn] query is unsupported inside a percolator query"); } }); } catch (IOException e) { diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java index 15f9798abe88b..ce63bcba0345c 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java @@ -26,7 +26,7 @@ /** * Request to perform a search ranking evaluation. */ -public class RankEvalRequest extends ActionRequest implements IndicesRequest.Replaceable { +public final class RankEvalRequest extends ActionRequest implements IndicesRequest.Replaceable { private RankEvalSpec rankingEvaluationSpec; @@ -35,7 +35,6 @@ public class RankEvalRequest extends ActionRequest implements IndicesRequest.Rep private SearchType searchType = SearchType.DEFAULT; - @SuppressWarnings("this-escape") public RankEvalRequest(RankEvalSpec rankingEvaluationSpec, String[] indices) { this.rankingEvaluationSpec = Objects.requireNonNull(rankingEvaluationSpec, "ranking evaluation specification must not be null"); indices(indices); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java index f99a22cbac6ef..982d1afcf6dd3 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java +++ 
b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java @@ -30,10 +30,9 @@ import static org.mockito.Mockito.mock; -public class TransportRankEvalActionTests extends ESTestCase { +public final class TransportRankEvalActionTests extends ESTestCase { - @SuppressWarnings("this-escape") - private Settings settings = Settings.builder() + private final Settings settings = Settings.builder() .put("path.home", createTempDir().toString()) .put("node.name", "test-" + getTestName()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java index 8e1cfb309a671..fdd98992503d7 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java @@ -23,9 +23,8 @@ import java.util.List; import java.util.Map; -public class RestDeleteByQueryActionTests extends RestActionTestCase { +public final class RestDeleteByQueryActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java index 7222b5efe9c85..889c8d0091c81 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java @@ -23,9 +23,8 @@ import java.util.List; import java.util.Map; -public class RestUpdateByQueryActionTests extends RestActionTestCase { +public final class 
RestUpdateByQueryActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java index fbf57a0198644..16a9f60a3d28d 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java @@ -35,7 +35,7 @@ public class AzureBlobContainer extends AbstractBlobContainer { - private final Logger logger = LogManager.getLogger(AzureBlobContainer.class); + private static final Logger logger = LogManager.getLogger(AzureBlobContainer.class); private final AzureBlobStore blobStore; private final String keyPath; diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java index ae5ae07c9078a..cdfd83b79b370 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java @@ -239,8 +239,8 @@ protected void doStop() { protected void doClose() throws IOException {} private static final class SuccessfulRequestTracker implements HttpPipelinePolicy { + private static final Logger logger = LogManager.getLogger(SuccessfulRequestTracker.class); private final BiConsumer onSuccessfulRequest; - private final Logger logger = LogManager.getLogger(SuccessfulRequestTracker.class); private SuccessfulRequestTracker(BiConsumer onSuccessfulRequest) { this.onSuccessfulRequest = 
onSuccessfulRequest; diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java index 3c131affbb84c..2dff8a10d39f7 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/CancellableRateLimitedFluxIterator.java @@ -50,7 +50,7 @@ public void cancel() { private final Condition condition; private final Consumer cleaner; private final AtomicReference subscription = new AtomicReference<>(); - private final Logger logger = LogManager.getLogger(CancellableRateLimitedFluxIterator.class); + private static final Logger logger = LogManager.getLogger(CancellableRateLimitedFluxIterator.class); private volatile Throwable error; private volatile boolean done; private int emittedElements; diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/executors/ReactorScheduledExecutorService.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/executors/ReactorScheduledExecutorService.java index a3359e07119b5..5f18a417dcccd 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/executors/ReactorScheduledExecutorService.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/executors/ReactorScheduledExecutorService.java @@ -37,7 +37,7 @@ public class ReactorScheduledExecutorService extends AbstractExecutorService implements ScheduledExecutorService { private final ThreadPool threadPool; private final ExecutorService delegate; - private final Logger logger = LogManager.getLogger(ReactorScheduledExecutorService.class); + private static final Logger logger = LogManager.getLogger(ReactorScheduledExecutorService.class); public 
ReactorScheduledExecutorService(ThreadPool threadPool, String executorName) { this.threadPool = threadPool; diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/RetryingHttpInputStream.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/RetryingHttpInputStream.java index 70aaf9864d56d..ddf23bce09721 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/RetryingHttpInputStream.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/RetryingHttpInputStream.java @@ -33,7 +33,7 @@ class RetryingHttpInputStream extends InputStream { public static final int MAX_SUPPRESSED_EXCEPTIONS = 10; public static final long MAX_RANGE_VAL = Long.MAX_VALUE - 1; - private final Logger logger = LogManager.getLogger(RetryingHttpInputStream.class); + private static final Logger logger = LogManager.getLogger(RetryingHttpInputStream.class); private final String blobName; private final URI blobURI; diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/URLHttpClient.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/URLHttpClient.java index 42bece3dbea16..490787714ff3a 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/URLHttpClient.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/URLHttpClient.java @@ -37,7 +37,7 @@ public class URLHttpClient implements Closeable { public static final int MAX_ERROR_MESSAGE_BODY_SIZE = 1024; private static final int MAX_CONNECTIONS = 50; - private final Logger logger = LogManager.getLogger(URLHttpClient.class); + private static final Logger logger = LogManager.getLogger(URLHttpClient.class); private final CloseableHttpClient client; private final URLHttpClientSettings httpClientSettings; @@ -142,7 +142,7 @@ public void close() throws IOException { 
}; } - private void handleInvalidResponse(CloseableHttpResponse response) { + private static void handleInvalidResponse(CloseableHttpResponse response) { int statusCode = response.getStatusLine().getStatusCode(); String errorBody = parseBodyAsString(response, MAX_ERROR_MESSAGE_BODY_SIZE); throw new URLHttpClientException(statusCode, createErrorMessage(statusCode, errorBody)); @@ -156,7 +156,7 @@ static String createErrorMessage(int statusCode, String errorMessage) { } } - private String parseBodyAsString(CloseableHttpResponse response, int maxSize) { + private static String parseBodyAsString(CloseableHttpResponse response, int maxSize) { String errorMessage = ""; InputStream bodyContent = null; try { diff --git a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java index 6d89571e5af90..1bb2116cc680a 100644 --- a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java +++ b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java @@ -12,14 +12,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; -import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; -import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.test.rest.ESRestTestCase; @@ -44,7 +42,6 @@ * This test ensure that we keep the search states of a CCS 
request correctly when the local and remote clusters * have different but compatible versions. See SearchService#createAndPutReaderContext */ -@SuppressWarnings("removal") public class SearchStatesIT extends ESRestTestCase { private static final Logger LOGGER = LogManager.getLogger(SearchStatesIT.class); @@ -90,7 +87,7 @@ static List parseHosts(String props) { public static void configureRemoteClusters(List remoteNodes) throws Exception { assertThat(remoteNodes, hasSize(3)); final String remoteClusterSettingPrefix = "cluster.remote." + CLUSTER_ALIAS + "."; - try (RestClient localClient = newLocalClient().getLowLevelClient()) { + try (RestClient localClient = newLocalClient()) { final Settings remoteConnectionSettings; if (randomBoolean()) { final List seeds = remoteNodes.stream() @@ -124,28 +121,32 @@ public static void configureRemoteClusters(List remoteNodes) throws Except } } - static RestHighLevelClient newLocalClient() { + static RestClient newLocalClient() { final List hosts = parseHosts("tests.rest.cluster"); final int index = random().nextInt(hosts.size()); LOGGER.info("Using client node {}", index); - return new RestHighLevelClient(RestClient.builder(hosts.get(index))); + return RestClient.builder(hosts.get(index)).build(); } - static RestHighLevelClient newRemoteClient() { - return new RestHighLevelClient(RestClient.builder(randomFrom(parseHosts("tests.rest.remote_cluster")))); + static RestClient newRemoteClient() { + return RestClient.builder(randomFrom(parseHosts("tests.rest.remote_cluster"))).build(); } - static int indexDocs(RestHighLevelClient client, String index, int numDocs) throws IOException { + static int indexDocs(RestClient client, String index, int numDocs) throws IOException { for (int i = 0; i < numDocs; i++) { - client.index(new IndexRequest(index).id("id_" + i).source("f", i), RequestOptions.DEFAULT); + Request createDoc = new Request("POST", "/" + index + "/_doc/id_" + i); + createDoc.setJsonEntity(Strings.format(""" + { "f": %s } + 
""", i)); + assertOK(client.performRequest(createDoc)); } - refresh(client.getLowLevelClient(), index); + refresh(client, index); return numDocs; } void verifySearch(String localIndex, int localNumDocs, String remoteIndex, int remoteNumDocs, Integer preFilterShardSize) { - try (RestClient localClient = newLocalClient().getLowLevelClient()) { + try (RestClient localClient = newLocalClient()) { Request request = new Request("POST", "/_search"); final int expectedDocs; if (randomBoolean()) { @@ -185,56 +186,40 @@ void verifySearch(String localIndex, int localNumDocs, String remoteIndex, int r public void testBWCSearchStates() throws Exception { String localIndex = "test_bwc_search_states_index"; String remoteIndex = "test_bwc_search_states_remote_index"; - try (RestHighLevelClient localClient = newLocalClient(); RestHighLevelClient remoteClient = newRemoteClient()) { - createIndex( - localClient.getLowLevelClient(), - localIndex, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build() - ); + try (RestClient localClient = newLocalClient(); RestClient remoteClient = newRemoteClient()) { + createIndex(localClient, localIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build()); int localNumDocs = indexDocs(localClient, localIndex, between(10, 100)); - createIndex( - remoteClient.getLowLevelClient(), - remoteIndex, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build() - ); + createIndex(remoteClient, remoteIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build()); int remoteNumDocs = indexDocs(remoteClient, remoteIndex, between(10, 100)); - configureRemoteClusters(getNodes(remoteClient.getLowLevelClient())); + configureRemoteClusters(getNodes(remoteClient)); int iterations = between(1, 20); for (int i = 0; i < iterations; i++) { verifySearch(localIndex, localNumDocs, CLUSTER_ALIAS + ":" + remoteIndex, remoteNumDocs, null); } - 
deleteIndex(localClient.getLowLevelClient(), localIndex); - deleteIndex(remoteClient.getLowLevelClient(), remoteIndex); + deleteIndex(localClient, localIndex); + deleteIndex(remoteClient, remoteIndex); } } public void testCanMatch() throws Exception { String localIndex = "test_can_match_local_index"; String remoteIndex = "test_can_match_remote_index"; - try (RestHighLevelClient localClient = newLocalClient(); RestHighLevelClient remoteClient = newRemoteClient()) { - createIndex( - localClient.getLowLevelClient(), - localIndex, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(5, 20)).build() - ); + try (RestClient localClient = newLocalClient(); RestClient remoteClient = newRemoteClient()) { + createIndex(localClient, localIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(5, 20)).build()); int localNumDocs = indexDocs(localClient, localIndex, between(10, 100)); - createIndex( - remoteClient.getLowLevelClient(), - remoteIndex, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(5, 20)).build() - ); + createIndex(remoteClient, remoteIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(5, 20)).build()); int remoteNumDocs = indexDocs(remoteClient, remoteIndex, between(10, 100)); - configureRemoteClusters(getNodes(remoteClient.getLowLevelClient())); + configureRemoteClusters(getNodes(remoteClient)); int iterations = between(1, 10); for (int i = 0; i < iterations; i++) { verifySearch(localIndex, localNumDocs, CLUSTER_ALIAS + ":" + remoteIndex, remoteNumDocs, between(1, 10)); } - deleteIndex(localClient.getLowLevelClient(), localIndex); - deleteIndex(remoteClient.getLowLevelClient(), remoteIndex); + deleteIndex(localClient, localIndex); + deleteIndex(remoteClient, remoteIndex); } } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java index 
d1fefd425ae7f..54f82b2366d14 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java @@ -310,7 +310,7 @@ public static void restartElasticsearch(Shell sh, Installation installation) thr * when instantiated, and advancing that cursor when the {@code clear()} * method is called. */ - public static class JournaldWrapper { + public static final class JournaldWrapper { private Shell sh; private String cursor; @@ -318,7 +318,6 @@ public static class JournaldWrapper { * Create a new wrapper for Elasticsearch JournalD logs. * @param sh A shell with appropriate permissions. */ - @SuppressWarnings("this-escape") public JournaldWrapper(Shell sh) { this.sh = sh; clear(); diff --git a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java index 824f4db5c4cf5..d9be4045c37e0 100644 --- a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java +++ b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -26,10 +25,8 @@ import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; -import java.util.Collections; import java.util.function.Consumer; -@SuppressWarnings("removal") public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase { private static final String USER = 
"x_pack_rest_user"; @@ -40,8 +37,8 @@ protected boolean preserveClusterUponCompletion() { return true; } - private static RestHighLevelClient cluster1Client; - private static RestHighLevelClient cluster2Client; + private static RestClient cluster1Client; + private static RestClient cluster2Client; private static boolean initialized = false; @Override @@ -62,8 +59,8 @@ public void initClientsAndConfigureClusters() throws Exception { request.addParameter("wait_for_status", "yellow"); request.addParameter("wait_for_nodes", "1"); }; - ensureHealth(cluster1Client().getLowLevelClient(), waitForYellowRequest); - ensureHealth(cluster2Client().getLowLevelClient(), waitForYellowRequest); + ensureHealth(cluster1Client, waitForYellowRequest); + ensureHealth(cluster2Client, waitForYellowRequest); initialized = true; } @@ -86,28 +83,22 @@ public static void destroyClients() throws IOException { } } - protected static RestHighLevelClient cluster1Client() { + protected static RestClient cluster1Client() { return cluster1Client; } - protected static RestHighLevelClient cluster2Client() { + protected static RestClient cluster2Client() { return cluster2Client; } - private static class HighLevelClient extends RestHighLevelClient { - private HighLevelClient(RestClient restClient) { - super(restClient, RestClient::close, Collections.emptyList()); - } - } - - private RestHighLevelClient buildClient(final String url) throws IOException { + private RestClient buildClient(final String url) throws IOException { int portSeparator = url.lastIndexOf(':'); HttpHost httpHost = new HttpHost( url.substring(0, portSeparator), Integer.parseInt(url.substring(portSeparator + 1)), getProtocol() ); - return new HighLevelClient(buildClient(restAdminSettings(), new HttpHost[] { httpHost })); + return buildClient(restAdminSettings(), new HttpHost[] { httpHost }); } protected boolean isOss() { diff --git a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java 
b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java index 9a0303ab60714..78ffb9cb7b7b6 100644 --- a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java +++ b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java @@ -7,13 +7,10 @@ */ package org.elasticsearch.cluster.remote.test; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.test.rest.ObjectPath; import org.junit.After; import org.junit.Before; @@ -28,44 +25,53 @@ public class RemoteClustersIT extends AbstractMultiClusterRemoteTestCase { @Before public void setupIndices() throws IOException { - RestClient cluster1Client = cluster1Client().getLowLevelClient(); - assertTrue(createIndex(cluster1Client, "test1", Settings.builder().put("index.number_of_replicas", 0).build()).isAcknowledged()); - cluster1Client().index( - new IndexRequest("test1").id("id1") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()), - RequestOptions.DEFAULT - ); - - RestClient cluster2Client = cluster2Client().getLowLevelClient(); - assertTrue(createIndex(cluster2Client, "test2", Settings.builder().put("index.number_of_replicas", 0).build()).isAcknowledged()); - cluster2Client().index( - new IndexRequest("test2").id("id1").source(XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()), - RequestOptions.DEFAULT - ); - cluster2Client().index( - new IndexRequest("test2").id("id2") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - 
.source(XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()), - RequestOptions.DEFAULT - ); - assertEquals(1L, cluster1Client().search(new SearchRequest("test1"), RequestOptions.DEFAULT).getHits().getTotalHits().value); - assertEquals(2L, cluster2Client().search(new SearchRequest("test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value); + assertTrue(createIndex(cluster1Client(), "test1", Settings.builder().put("index.number_of_replicas", 0).build()).isAcknowledged()); + { + Request createDoc = new Request("POST", "/test1/_doc/id1?refresh=true"); + createDoc.setJsonEntity(""" + { "foo": "bar" } + """); + assertOK(cluster1Client().performRequest(createDoc)); + } + { + Request searchRequest = new Request("POST", "/test1/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster1Client().performRequest(searchRequest)); + assertEquals(1, (int) doc.evaluate("hits.total.value")); + } + + assertTrue(createIndex(cluster2Client(), "test2", Settings.builder().put("index.number_of_replicas", 0).build()).isAcknowledged()); + { + Request createDoc = new Request("POST", "/test2/_doc/id1?refresh=true"); + createDoc.setJsonEntity(""" + { "foo": "bar" } + """); + assertOK(cluster2Client().performRequest(createDoc)); + } + { + Request createDoc = new Request("POST", "/test2/_doc/id2?refresh=true"); + createDoc.setJsonEntity(""" + { "foo": "bar" } + """); + assertOK(cluster2Client().performRequest(createDoc)); + } + { + Request searchRequest = new Request("POST", "/test2/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster2Client().performRequest(searchRequest)); + assertEquals(2, (int) doc.evaluate("hits.total.value")); + } } @After public void clearIndices() throws IOException { - RestClient cluster1Client = cluster1Client().getLowLevelClient(); - assertTrue(deleteIndex(cluster1Client, "*").isAcknowledged()); - RestClient cluster2Client = cluster2Client().getLowLevelClient(); - assertTrue(deleteIndex(cluster2Client, 
"*").isAcknowledged()); + assertTrue(deleteIndex(cluster1Client(), "*").isAcknowledged()); + assertTrue(deleteIndex(cluster2Client(), "*").isAcknowledged()); } @After public void clearRemoteClusterSettings() throws IOException { Settings setting = Settings.builder().putNull("cluster.remote.*").build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), setting); - updateClusterSettings(cluster2Client().getLowLevelClient(), setting); + updateClusterSettings(cluster1Client(), setting); + updateClusterSettings(cluster2Client(), setting); } public void testProxyModeConnectionWorks() throws IOException { @@ -76,14 +82,15 @@ public void testProxyModeConnectionWorks() throws IOException { .put("cluster.remote.cluster2.proxy_address", cluster2RemoteClusterSeed) .build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), settings); + updateClusterSettings(cluster1Client(), settings); - assertTrue(isConnected(cluster1Client().getLowLevelClient())); + assertTrue(isConnected(cluster1Client())); - assertEquals( - 2L, - cluster1Client().search(new SearchRequest("cluster2:test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value - ); + { + Request searchRequest = new Request("POST", "/cluster2:test2/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster1Client().performRequest(searchRequest)); + assertEquals(2, (int) doc.evaluate("hits.total.value")); + } } public void testSniffModeConnectionFails() throws IOException { @@ -93,9 +100,9 @@ public void testSniffModeConnectionFails() throws IOException { .put("cluster.remote.cluster2alt.mode", "sniff") .put("cluster.remote.cluster2alt.seeds", cluster2RemoteClusterSeed) .build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), settings); + updateClusterSettings(cluster1Client(), settings); - assertFalse(isConnected(cluster1Client().getLowLevelClient())); + assertFalse(isConnected(cluster1Client())); } public void testHAProxyModeConnectionWorks() throws IOException { @@ -105,14 
+112,15 @@ public void testHAProxyModeConnectionWorks() throws IOException { .put("cluster.remote.haproxynosn.mode", "proxy") .put("cluster.remote.haproxynosn.proxy_address", proxyAddress) .build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), settings); + updateClusterSettings(cluster1Client(), settings); - assertTrue(isConnected(cluster1Client().getLowLevelClient())); + assertTrue(isConnected(cluster1Client())); - assertEquals( - 2L, - cluster1Client().search(new SearchRequest("haproxynosn:test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value - ); + { + Request searchRequest = new Request("POST", "/haproxynosn:test2/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster1Client().performRequest(searchRequest)); + assertEquals(2, (int) doc.evaluate("hits.total.value")); + } } public void testHAProxyModeConnectionWithSNIToCluster1Works() throws IOException { @@ -123,14 +131,15 @@ public void testHAProxyModeConnectionWithSNIToCluster1Works() throws IOException .put("cluster.remote.haproxysni1.proxy_address", "haproxy:9600") .put("cluster.remote.haproxysni1.server_name", "application1.example.com") .build(); - updateClusterSettings(cluster2Client().getLowLevelClient(), settings); + updateClusterSettings(cluster2Client(), settings); - assertTrue(isConnected(cluster2Client().getLowLevelClient())); + assertTrue(isConnected(cluster2Client())); - assertEquals( - 1L, - cluster2Client().search(new SearchRequest("haproxysni1:test1"), RequestOptions.DEFAULT).getHits().getTotalHits().value - ); + { + Request searchRequest = new Request("POST", "/haproxysni1:test1/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster2Client().performRequest(searchRequest)); + assertEquals(1, (int) doc.evaluate("hits.total.value")); + } } public void testHAProxyModeConnectionWithSNIToCluster2Works() throws IOException { @@ -141,14 +150,15 @@ public void testHAProxyModeConnectionWithSNIToCluster2Works() throws IOException 
.put("cluster.remote.haproxysni2.proxy_address", "haproxy:9600") .put("cluster.remote.haproxysni2.server_name", "application2.example.com") .build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), settings); + updateClusterSettings(cluster1Client(), settings); - assertTrue(isConnected(cluster1Client().getLowLevelClient())); + assertTrue(isConnected(cluster1Client())); - assertEquals( - 2L, - cluster1Client().search(new SearchRequest("haproxysni2:test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value - ); + { + Request searchRequest = new Request("POST", "/haproxysni2:test2/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster1Client().performRequest(searchRequest)); + assertEquals(2, (int) doc.evaluate("hits.total.value")); + } } @SuppressWarnings("unchecked") diff --git a/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml b/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml index 0c814fd0f9692..f8b1de5155527 100644 --- a/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml +++ b/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml @@ -148,3 +148,41 @@ setup: _internal.get_desired_balance: { } - is_true: 'cluster_info' + +--- +"Test undesired_shard_allocation_count": + + - skip: + version: " - 8.11.99" + reason: "undesired_shard_allocation_count added in in 8.12.0" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + cluster.health: + index: test + wait_for_status: green + + - do: + cluster.state: {} + - set: { nodes._arbitrary_key_ : node_id } + - set: { nodes.$node_id.name : node_name } + + - do: + _internal.get_desired_balance: { } + + - gte: { 'cluster_balance_stats.shard_count' : 0 } + - gte: { 
'cluster_balance_stats.undesired_shard_allocation_count' : 0 } + - gte: { 'cluster_balance_stats.nodes.$node_name.undesired_shard_allocation_count' : 0 } + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.total' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.min' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.max' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.average' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.std_dev' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml index 4f943abf1106a..8e1d3431069cf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml @@ -183,3 +183,41 @@ setup: - do: _internal.delete_desired_balance: { } + +--- +"Test undesired_shard_allocation_count": + + - skip: + version: " - 8.11.99" + reason: "undesired_shard_allocation_count added in in 8.12.0" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + cluster.health: + index: test + wait_for_status: green + + - do: + cluster.state: {} + - set: { nodes._arbitrary_key_ : node_id } + - set: { nodes.$node_id.name : node_name } + + - do: + _internal.get_desired_balance: { } + + - gte: { 'cluster_balance_stats.shard_count' : 0 } + - gte: { 'cluster_balance_stats.undesired_shard_allocation_count' : 0 } + - gte: { 'cluster_balance_stats.nodes.$node_name.undesired_shard_allocation_count' : 0 } + - exists: 
'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.total' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.min' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.max' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.average' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.std_dev' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml index 51c12892c4859..890162787f04a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml @@ -492,3 +492,52 @@ index: test-generic - match: { test-generic.mappings.properties.field.type: "keyword" } - match: { test-generic.mappings.properties.field.ignore_above: 1024 } +--- +"Using deprecated component templates and pipelines in index template": + - skip: + version: ' - 8.11.99' + reason: 'The deprecated flags have been introduced in 8.12.0' + features: allowed_warnings + + - do: + cluster.put_component_template: + name: mapping + body: + template: + mappings: + properties: + field: + type: long + deprecated: true + + - do: + ingest.put_pipeline: + id: "my_deprecated_pipeline" + body: + deprecated: true + processors: [] + - match: { acknowledged: true } + + - do: + cluster.put_component_template: + name: setting + body: + template: + settings: + index: + default_pipeline: my_deprecated_pipeline + + - do: + allowed_warnings: + - "index template [test-composable-template] has index patterns [test-*] matching patterns from existing older 
templates [global] with patterns (global => [*]); this template [test-composable-template] will take precedence during new index creation" + - "index template [test-composable-template] uses deprecated component template [mapping]" + - "index template [test-composable-template] uses deprecated ingest pipeline [my_deprecated_pipeline]" + indices.put_index_template: + name: test-composable-template + body: + index_patterns: + - test-* + composed_of: + - mapping + - setting + - is_true: acknowledged diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index 1a03896f6d087..4607ae758b91f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -93,3 +93,50 @@ teardown: - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} +--- +"Test hybrid search with knn where automatically disables weighted mode": + - skip: + version: ' - 8.11.99' + reason: 'kNN was not correctly skipped until 8.12' + + - do: + indices.create: + index: test-highlighting-knn + body: + mappings: + "properties": + "vectors": + "type": "dense_vector" + "dims": 2 + "index": true + "similarity": "l2_norm" + "text": + "type": "text" + "fields": + "fvh": + "type": "text" + "term_vector": "with_positions_offsets" + "postings": + "type": "text" + "index_options": "offsets" + - do: + index: + index: test-highlighting-knn + id: "1" + body: + "text" : "The quick brown fox is brown." 
+ "vectors": [1, 2] + - do: + indices.refresh: {} + + - do: + search: + index: test-highlighting-knn + body: { + "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, + "highlight": { "type": "unified", "fields": { "*": { } } }, + "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml new file mode 100644 index 0000000000000..849df86a30568 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml @@ -0,0 +1,274 @@ +# test how knn query interacts with filters +setup: + - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + + - do: + indices.create: + index: my_index + body: + settings: + number_of_shards: 1 + mappings: + dynamic: false + properties: + my_vector: + type: dense_vector + dims: 4 + index : true + similarity : l2_norm + my_name: + type: keyword + store: true + aliases: + my_alias: + filter: + term: + my_name: v2 + my_alias1: + filter: + term: + my_name: v1 + + - do: + bulk: + refresh: true + index: my_index + body: + - '{"index": {"_id": "1"}}' + - '{"my_vector": [1, 1, 1, 1], "my_name": "v1"}' + - '{"index": {"_id": "2"}}' + - '{"my_vector": [1, 1, 1, 2], "my_name": "v2"}' + - '{"index": {"_id": "3"}}' + - '{"my_vector": [1, 1, 1, 3], "my_name": "v1"}' + - '{"index": {"_id": "4"}}' + - '{"my_vector": [1, 1, 1, 4], "my_name": "v2"}' + - '{"index": {"_id": "5"}}' + - '{"my_vector": [1, 1, 1, 5], "my_name": "v1"}' + - '{"index": 
{"_id": "6"}}' + - '{"my_vector": [1, 1, 1, 6], "my_name": "v2"}' + - '{"index": {"_id": "7"}}' + - '{"my_vector": [1, 1, 1, 7], "my_name": "v1"}' + - '{"index": {"_id": "8"}}' + - '{"my_vector": [1, 1, 1, 8], "my_name": "v2"}' + - '{"index": {"_id": "9"}}' + - '{"my_vector": [1, 1, 1, 9], "my_name": "v1"}' + - '{"index": {"_id": "10"}}' + - '{"my_vector": [1, 1, 1, 10], "my_name": "v2"}' + +--- +"Simple knn query": + + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + + - match: { hits.total.value: 5 } # collector sees num_candidates docs + - length: {hits.hits: 3} + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0.fields.my_name.0: v1 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2.fields.my_name.0: v1 } +--- +"PRE_FILTER: knn query with alias filter as pre-filter": + - do: + search: + index: my_alias + body: + size: 3 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + + - match: { hits.total.value: 5 } # collector sees num_candidates docs + - length: {hits.hits: 3} + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "6" } + - match: { hits.hits.2.fields.my_name.0: v2 } + + # alias prefilter is combined with internal filter + - do: + search: + index: my_alias + body: + size: 3 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 5 + filter: + term: + my_name: v1 + + # both alias filter and internal filter are applied as pre-filter resulting in 0 hits for knn search + - match: { hits.total.value: 0 } + - length: { hits.hits: 0 } + + # alias prefilter is applied when knn is a part of another query + 
- do: + search: + index: my_alias + body: + size: 3 + fields: [ my_name ] + query: + bool: + should: + - wildcard: + my_name: + value: "v*" + - knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + + - match: { hits.total.value: 5 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "6" } + - match: { hits.hits.2.fields.my_name.0: v2 } + +--- +"PRE_FILTER: pre-filter across multiple internal filters": +- do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 5 + filter: + - term: + my_name: v1 + - term: + my_name: v2 +- match: { hits.total.value: 0 } +- length: { hits.hits: 0 } + +--- +"PRE_FILTER: pre-filter across multiple aliases": + - do: + search: + index: my_alias,my_alias1 + body: + size: 6 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 100 + + - match: { hits.total.value: 10 } # 5 docs from each alias + - length: {hits.hits: 6} + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0.fields.my_name.0: v1 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2.fields.my_name.0: v1 } + - match: { hits.hits.3._id: "4" } + - match: { hits.hits.3.fields.my_name.0: v2 } + - match: { hits.hits.4._id: "5" } + - match: { hits.hits.4.fields.my_name.0: v1 } + - match: { hits.hits.5._id: "6" } + - match: { hits.hits.5.fields.my_name.0: v2 } + +--- +"PRE_FILTER: knn query with internal filter as pre-filter": + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + filter: + term: + my_name: v2 + + - match: { hits.total.value: 
5 } + - length: {hits.hits: 3} + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "6" } + - match: { hits.hits.2.fields.my_name.0: v2 } + +--- +"POST_FILTER: knn query with filter from a parent bool query as post-filter": + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + bool: + must: + - term: + my_name: v2 + - knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + + - match: { hits.total.value: 2 } + - length: {hits.hits: 2} # knn query returns top 5 docs, but they are post-filtered to 2 docs + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1.fields.my_name.0: v2 } + + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + bool: + must: + - term: + my_name: v2 + - knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 5 + filter: + term: + my_name: v1 + + - match: { hits.total.value: 0} + - length: { hits.hits: 0 } # knn query returns top 5 docs, but they are post-filtered to 0 docs diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml new file mode 100644 index 0000000000000..b1c0fd948481b --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml @@ -0,0 +1,216 @@ +# test how knn query interacts with filters +setup: + - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + features: close_to + + - do: + indices.create: + index: my_index + body: + settings: + number_of_shards: 2 + mappings: + dynamic: false + properties: + my_vector: + type: dense_vector + dims: 
4 + index : true + similarity : l2_norm + my_name: + type: keyword + store: true + + - do: + bulk: + refresh: true + index: my_index + body: + - '{"index": {"_id": "1"}}' + - '{"my_vector": [1, 1, 1, 1], "my_name": "v1"}' + - '{"index": {"_id": "2"}}' + - '{"my_vector": [1, 1, 1, 2], "my_name": "v2"}' + - '{"index": {"_id": "3"}}' + - '{"my_vector": [1, 1, 1, 3], "my_name": "v1"}' + - '{"index": {"_id": "4"}}' + - '{"my_vector": [1, 1, 1, 4], "my_name": "v2"}' + - '{"index": {"_id": "5"}}' + - '{"my_vector": [1, 1, 1, 5], "my_name": "v1"}' + - '{"index": {"_id": "6"}}' + - '{"my_vector": [1, 1, 1, 6], "my_name": "v2"}' + - '{"index": {"_id": "7"}}' + - '{"my_vector": [1, 1, 1, 7], "my_name": "v1"}' + - '{"index": {"_id": "8"}}' + - '{"my_vector": [1, 1, 1, 8], "my_name": "v2"}' + - '{"index": {"_id": "9"}}' + - '{"my_vector": [1, 1, 1, 9], "my_name": "v1"}' + - '{"index": {"_id": "10"}}' + - '{"my_vector": [1, 1, 1, 10], "my_name": "v2"}' + - '{"index": {"_id": "11"}}' + - '{"my_vector": [1, 1, 1, 11], "my_name": "v1"}' + - '{"index": {"_id": "12"}}' + - '{"my_vector": [1, 1, 1, 12], "my_name": "v2"}' + + +--- +"Search for 2 knn queries combines scores from them": + - do: + search: + index: my_index + body: + size: 6 + query: + bool: + should: + - knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 100 + boost: 1.1 + - knn: + field: my_vector + query_vector: [ 1, 1, 1, 12 ] + num_candidates: 100 + + - length: {hits.hits: 6} + - match: {hits.total.value: 12} + - match: {hits.hits.0._id: '1'} + - match: {hits.hits.1._id: '12'} + - match: {hits.hits.2._id: '2'} + - match: { hits.hits.3._id: '11' } + - match: { hits.hits.4._id: '3' } + - match: { hits.hits.5._id: '10' } + + +--- +"Hybrid search combines scores from knn and other queries": + - do: + search: + include_named_queries_score: true + index: my_index + body: + size: 3 + query: + bool: + should: + - wildcard: + my_name: + value: "v*" # produces scores 1.0 + _name: "bm25_query" + - knn: + 
field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 3 + _name: "knn_query" + + - length: {hits.hits: 3} + - match: {hits.total.value: 12} + - match: {hits.hits.0._id: '1'} + - match: {hits.hits.1._id: '2'} + - match: {hits.hits.2._id: '3'} + + - close_to: {hits.hits.0._score: { value: 2.0, error: 0.00001 } } + - close_to: {hits.hits.0.matched_queries.bm25_query: { value: 1.0, error: 0.00001 } } + - close_to: {hits.hits.0.matched_queries.knn_query: { value: 1.0, error: 0.00001 } } + + - close_to: {hits.hits.1._score: { value: 1.5, error: 0.00001 } } + - close_to: { hits.hits.1.matched_queries.bm25_query: { value: 1.0, error: 0.00001 } } + - close_to: { hits.hits.1.matched_queries.knn_query: { value: 0.5, error: 0.00001 } } + + - close_to: {hits.hits.2._score: { value: 1.2, error: 0.00001 } } + - close_to: { hits.hits.2.matched_queries.bm25_query: { value: 1.0, error: 0.00001 } } + - close_to: { hits.hits.2.matched_queries.knn_query: { value: 0.2, error: 0.00001 } } + + # the same query with boosts + - do: + search: + include_named_queries_score: true + index: my_index + body: + size: 3 + query: + bool: + should: + - wildcard: + my_name: + value: "v*" # produces scores 1.0 + boost: 100 + _name: "bm25_query" + - knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 3 + boost: 100 + _name: "knn_query" + + - length: { hits.hits: 3 } + - match: { hits.total.value: 12 } + - match: { hits.hits.0._id: '1' } + - match: { hits.hits.1._id: '2' } + - match: { hits.hits.2._id: '3' } + + - close_to: { hits.hits.0._score: { value: 200.0, error: 0.00001 } } + - close_to: { hits.hits.0.matched_queries.bm25_query: { value: 100.0, error: 0.00001 } } + - close_to: { hits.hits.0.matched_queries.knn_query: { value: 100.0, error: 0.00001 } } + + - close_to: { hits.hits.1._score: { value: 150.0, error: 0.00001 } } + - close_to: { hits.hits.1.matched_queries.bm25_query: { value: 100.0, error: 0.00001 } } + - close_to: { hits.hits.1.matched_queries.knn_query: 
{ value: 50.0, error: 0.00001 } } + + - close_to: { hits.hits.2._score: { value: 120, error: 0.00001 } } + - close_to: { hits.hits.2.matched_queries.bm25_query: { value: 100.0, error: 0.00001 } } + - close_to: { hits.hits.2.matched_queries.knn_query: { value: 20.0, error: 0.00001 } } + +--- +"Aggregations with collected number of docs depends on num_candidates": + - do: + search: + index: my_index + body: + size: 2 + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 100 # collect up to 100 candidates from each shard + aggs: + my_agg: + terms: + field: my_name + order: + _key: asc + + - length: {hits.hits: 2} + - match: {hits.total.value: 12} + - match: {aggregations.my_agg.buckets.0.key: 'v1'} + - match: {aggregations.my_agg.buckets.1.key: 'v2'} + - match: {aggregations.my_agg.buckets.0.doc_count: 6} + - match: {aggregations.my_agg.buckets.1.doc_count: 6} + + - do: + search: + index: my_index + body: + size: 2 + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 3 # collect 3 candidates from each shard + aggs: + my_agg2: + terms: + field: my_name + order: + _key: asc + my_sum_buckets: + sum_bucket: + buckets_path: "my_agg2>_count" + + - length: { hits.hits: 2 } + - match: { hits.total.value: 6 } + - match: { aggregations.my_agg2.buckets.0.key: 'v1' } + - match: { aggregations.my_agg2.buckets.1.key: 'v2' } + - match: { aggregations.my_sum_buckets.value: 6.0 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml new file mode 100644 index 0000000000000..435291b454d08 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml @@ -0,0 +1,213 @@ +setup: + - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + - do: + indices.create: + index: test + 
body: + settings: + index: + number_of_shards: 1 + mappings: + properties: + name: + type: keyword + nested: + type: nested + properties: + paragraph_id: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + aliases: + my_alias: + filter: + term: + name: "rabbit.jpg" + + - do: + index: + index: test + id: "1" + body: + name: cow.jpg + nested: + - paragraph_id: 0 + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + - paragraph_id: 1 + vector: [240.0, 300, -3, 1, -20] + + - do: + index: + index: test + id: "2" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [-0.5, 100.0, -13, 14.8, -156.0] + - paragraph_id: 2 + vector: [0, 100.0, 0, 14.8, -156.0] + - paragraph_id: 3 + vector: [0, 1.0, 0, 1.8, -15.0] + + - do: + index: + index: test + id: "3" + body: + name: rabbit.jpg + nested: + - paragraph_id: 0 + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - do: + indices.refresh: {} + +--- +"nested kNN search that returns diverse parents docs": + - do: + search: + index: test + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - match: {hits.hits.1._id: "3"} + - match: {hits.hits.1.fields.name.0: "rabbit.jpg"} + + - do: + search: + index: test + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 3 + inner_hits: { size: 1, "fields": [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 3} + + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.name.0: "moose.jpg" } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + + - match: { hits.hits.1._id: "3" } + - match: { hits.hits.1.fields.name.0: "rabbit.jpg" } + - match: { 
hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2.fields.name.0: "cow.jpg" } + - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + +--- +"nested kNN search pre-filtered on alias with filter on top level fields": + - do: + search: + index: my_alias # filter on name: "rabbit.jpg" + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 1 + inner_hits: { size: 1, "fields": [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 1} # as alias is passed as pre-filter, we get a single result + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + +--- +"nested kNN search post-filtered on top level fields": + - do: + search: + index: test + body: + fields: [ "name" ] + query: + bool: + must: + - term: + name: "rabbit.jpg" + - nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 1 + - match: { hits.total.value: 0 } # no hits because returned single vector did not pass post-filter + + - do: + search: + index: test + body: + fields: [ "name" ] + query: + bool: + must: + - term: + name: "rabbit.jpg" + - nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 3 + inner_hits: { size: 1, fields: [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 1} + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } +--- + +"nested kNN search post-filtered on nested fields DOES NOT work": + - do: + search: + 
index: test + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + bool: + must: + - term: + nested.paragraph_id: 3 + - knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 6 + inner_hits: { size: 1, "fields": [ "nested.paragraph_id" ], _source: false } + # no hits because, regardless of num_candidates knn returns top 3 child vectors from distinct parents + # and they don't pass the post-filter + # TODO: fix it on Lucene level so nested knn respects num_candidates + # or do pre-filtering + - match: {hits.total.value: 0} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml new file mode 100644 index 0000000000000..8f52a72cce01e --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml @@ -0,0 +1,127 @@ +# test how knn query interact with other queries +setup: + - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + features: close_to + + - do: + indices.create: + index: my_index + body: + settings: + number_of_shards: 1 + mappings: + dynamic: false + properties: + my_vector: + type: dense_vector + dims: 4 + index : true + similarity : l2_norm + my_name: + type: keyword + store: true + aliases: + my_alias: + filter: + term: + my_name: v2 + my_alias1: + filter: + term: + my_name: v1 + + - do: + bulk: + refresh: true + index: my_index + body: + - '{"index": {"_id": "1"}}' + - '{"my_vector": [1, 1, 1, 1], "my_name": "v1"}' + - '{"index": {"_id": "2"}}' + - '{"my_vector": [1, 1, 1, 2], "my_name": "v2"}' + - '{"index": {"_id": "3"}}' + - '{"my_vector": [1, 1, 1, 3], "my_name": "v1"}' + - '{"index": {"_id": "4"}}' + - '{"my_vector": [1, 1, 1, 4], "my_name": "v2"}' + - '{"index": {"_id": "5"}}' + - '{"my_vector": [1, 1, 1, 5], "my_name": 
"v1"}' + - '{"index": {"_id": "6"}}' + - '{"my_vector": [1, 1, 1, 6], "my_name": "v2"}' + - '{"index": {"_id": "7"}}' + - '{"my_vector": [1, 1, 1, 7], "my_name": "v1"}' + - '{"index": {"_id": "8"}}' + - '{"my_vector": [1, 1, 1, 8], "my_name": "v2"}' + - '{"index": {"_id": "9"}}' + - '{"my_vector": [1, 1, 1, 9], "my_name": "v1"}' + - '{"index": {"_id": "10"}}' + - '{"my_vector": [1, 1, 1, 10], "my_name": "v2"}' + +--- +"Function score query with knn query": + # find top 5 knn docs, then boost docs with name v1 by 10 and docs with name v2 by 100 + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + function_score: + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 5 + functions: + - filter: { match: { my_name: v1 } } + weight: 10 + - filter: { match: { my_name: v2 } } + weight: 100 + boost_mode: multiply + + - match: { hits.total.value: 5 } # collector sees num_candidates docs + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - close_to: { hits.hits.0._score: { value: 50.0, error: 0.001 } } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1.fields.my_name.0: v1 } + - close_to: { hits.hits.1._score: { value: 10.0, error: 0.001 } } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2.fields.my_name.0: v2 } + - close_to: { hits.hits.2._score: { value: 10.0, error: 0.001 } } + +--- +"dis_max query with knn query": + - do: + search: + index: my_index + body: + size: 10 + fields: [ my_name ] + query: + dis_max: + queries: + - knn: { field: my_vector, query_vector: [ 1, 1, 1, 1 ], num_candidates: 5 } + - match: { my_name: v2 } + tie_breaker: 0.8 + + - match: { hits.total.value: 8 } # 5 knn results + extra results from match query + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1.fields.my_name.0: v1 } + - match: { hits.hits.2._id: "4" 
} + - match: { hits.hits.2.fields.my_name.0: v2 } + - match: { hits.hits.3._id: "6" } + - match: { hits.hits.3.fields.my_name.0: v2 } + - match: { hits.hits.4._id: "8" } + - match: { hits.hits.4.fields.my_name.0: v2 } + - match: { hits.hits.5._id: "10" } + - match: { hits.hits.5.fields.my_name.0: v2 } + - match: { hits.hits.6._id: "3" } + - match: { hits.hits.6.fields.my_name.0: v1 } + - match: { hits.hits.7._id: "5" } + - match: { hits.hits.7.fields.my_name.0: v1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml index 340cd8f8d0f70..57f8603f1e06e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml @@ -294,23 +294,6 @@ setup: - match: { error.root_cause.0.reason: "failed to create query: field [nonexistent] does not exist in the mapping" } --- -"Direct kNN queries are disallowed": - - skip: - version: ' - 8.3.99' - reason: 'error message changed in 8.4' - - do: - catch: bad_request - search: - index: test-index - body: - query: - knn: - field: vector - query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] - num_candidates: 1 - - match: { error.root_cause.0.type: "illegal_argument_exception" } - - match: { error.root_cause.0.reason: "[knn] queries cannot be provided directly, use the [knn] body parameter instead" } ---- "KNN Vector similarity search only": - skip: version: ' - 8.7.99' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml index 873b6d87cac66..ea21bb69a77b8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml @@ -163,20 +163,6 @@ setup: - match: { error.root_cause.0.reason: "failed to create query: field [nonexistent] does not exist in the mapping" } --- -"Direct kNN queries are disallowed": - - do: - catch: bad_request - search: - index: test - body: - query: - knn: - field: vector - query_vector: [ -1, 0, 1, 2, 3 ] - num_candidates: 1 - - match: { error.root_cause.0.type: "illegal_argument_exception" } - - match: { error.root_cause.0.reason: "[knn] queries cannot be provided directly, use the [knn] body parameter instead" } ---- "Vector similarity search only": - skip: version: ' - 8.7.99' diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index e3ea54f382c0a..4b395ec6856e5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -41,6 +40,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; @@ -270,12 +270,14 @@ public void onFailure(Exception e) { // we only really assert that we never reuse segments of old indices or anything like this here and that nothing fails with // crazy exceptions - SearchResponse expected = prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true)) - .get(); - SearchResponse all = prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get(); - assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value); - logger.info("total: {}", expected.getHits().getTotalHits().value); + assertNoFailuresAndResponse( + prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()) + .setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true)), + expected -> assertNoFailuresAndResponse(prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()), all -> { + assertEquals(expected + " vs. 
" + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value); + logger.info("total: {}", expected.getHits().getTotalHits().value); + }) + ); } public void testRestartIndexCreationAfterFullClusterRestart() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java index d010c8b0cd74f..f55ac7172266d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java @@ -38,7 +38,7 @@ @ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, minNumDataNodes = 2) public class AwarenessAllocationIT extends ESIntegTestCase { - private final Logger logger = LogManager.getLogger(AwarenessAllocationIT.class); + private static final Logger logger = LogManager.getLogger(AwarenessAllocationIT.class); @Override protected int numberOfReplicas() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index bd3a545d7ed77..6175395803e88 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -9,8 +9,6 @@ package org.elasticsearch.cluster.allocation; import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; @@ -66,7 +64,6 @@ 
@LuceneTestCase.SuppressFileSystems(value = "WindowsFS") @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class ClusterRerouteIT extends ESIntegTestCase { - private final Logger logger = LogManager.getLogger(ClusterRerouteIT.class); public void testRerouteWithCommands_disableAllocationSettings() throws Exception { Settings commonSettings = Settings.builder() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java index 3a2c6b5ebd0f7..80bba57270aa5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java @@ -193,7 +193,7 @@ public void testDeleteCreateInOneBulk() throws Exception { refresh(); disruption.startDisrupting(); logger.info("--> delete index"); - executeAndCancelCommittedPublication(indicesAdmin().prepareDelete("test").setTimeout("0s")).get(10, TimeUnit.SECONDS); + executeAndCancelCommittedPublication(indicesAdmin().prepareDelete("test").setTimeout("0s")).get(30, TimeUnit.SECONDS); logger.info("--> and recreate it"); executeAndCancelCommittedPublication( prepareCreate("test").setSettings( @@ -201,7 +201,7 @@ public void testDeleteCreateInOneBulk() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS.getKey(), "0") ).setTimeout("0s") - ).get(10, TimeUnit.SECONDS); + ).get(30, TimeUnit.SECONDS); logger.info("--> letting cluster proceed"); @@ -295,7 +295,7 @@ public void testDelayedMappingPropagationOnPrimary() throws Exception { // Now make sure the indexing request finishes successfully disruption.stopDisrupting(); - assertTrue(putMappingResponse.get(10, TimeUnit.SECONDS).isAcknowledged()); + assertTrue(putMappingResponse.get(30, TimeUnit.SECONDS).isAcknowledged()); 
assertThat(docIndexResponse.get(10, TimeUnit.SECONDS), instanceOf(IndexResponse.class)); assertEquals(1, docIndexResponse.get(10, TimeUnit.SECONDS).getShardInfo().getTotal()); } @@ -408,11 +408,11 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { // Now make sure the indexing request finishes successfully disruption.stopDisrupting(); - assertTrue(putMappingResponse.get(10, TimeUnit.SECONDS).isAcknowledged()); + assertTrue(putMappingResponse.get(30, TimeUnit.SECONDS).isAcknowledged()); assertThat(docIndexResponse.get(10, TimeUnit.SECONDS), instanceOf(IndexResponse.class)); assertEquals(2, docIndexResponse.get(10, TimeUnit.SECONDS).getShardInfo().getTotal()); // both shards should have succeeded - assertThat(dynamicMappingsFut.get(10, TimeUnit.SECONDS).getResult(), equalTo(CREATED)); + assertThat(dynamicMappingsFut.get(30, TimeUnit.SECONDS).getResult(), equalTo(CREATED)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index e995d815af0f1..d92664f55416a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -8,8 +8,6 @@ package org.elasticsearch.gateway; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; @@ -69,8 +67,6 @@ @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class GatewayIndexStateIT extends ESIntegTestCase { - private final Logger logger = LogManager.getLogger(GatewayIndexStateIT.class); - @Override protected boolean addMockInternalEngine() { // testRecoverBrokenIndexMetadata replies on the flushing on shutdown behavior which can be randomly disabled 
in MockInternalEngine. diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java index 9b763ea581187..dd22f50ab420b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java @@ -8,8 +8,6 @@ package org.elasticsearch.indices.state; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.support.ActiveShardCount; @@ -27,8 +25,6 @@ @ESIntegTestCase.ClusterScope(minNumDataNodes = 2) public class SimpleIndexStateIT extends ESIntegTestCase { - private final Logger logger = LogManager.getLogger(SimpleIndexStateIT.class); - public void testSimpleOpenClose() { logger.info("--> creating test index"); createIndex("test"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index bfd16adaa405b..fceeb2013b7c5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -8,8 +8,6 @@ package org.elasticsearch.recovery; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; @@ -47,7 +45,6 @@ import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { - private final Logger logger = LogManager.getLogger(RecoveryWhileUnderLoadIT.class); public static final class RetentionLeaseSyncIntervalSettingPlugin extends Plugin { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 05fa9dc66928c..345504582305a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.search; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.MultiSearchAction; @@ -50,6 +51,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101739") @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) public class SearchCancellationIT extends AbstractSearchCancellationTestCase { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java index 8b7f566750042..ba20e86237530 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.GeoPoint; import 
org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -31,6 +30,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.closeTo; @ESIntegTestCase.SuiteScopeTestCase @@ -54,161 +54,186 @@ protected void setupSuiteScopeCluster() throws Exception { } public void testUnmappedTerms() { - SearchResponse response = prepareSearch("idx").addAggregation(terms("my_terms").field("non_existing_field").missing("bar")).get(); - assertNoFailures(response); - Terms terms = response.getAggregations().get("my_terms"); - assertEquals(1, terms.getBuckets().size()); - assertEquals(2, terms.getBucketByKey("bar").getDocCount()); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation(terms("my_terms").field("non_existing_field").missing("bar")), + response -> { + Terms terms = response.getAggregations().get("my_terms"); + assertEquals(1, terms.getBuckets().size()); + assertEquals(2, terms.getBucketByKey("bar").getDocCount()); + } + ); } public void testStringTerms() { for (ExecutionMode mode : ExecutionMode.values()) { - SearchResponse response = prepareSearch("idx").addAggregation( - terms("my_terms").field("str").executionHint(mode.toString()).missing("bar") - ).get(); - assertNoFailures(response); - Terms terms = response.getAggregations().get("my_terms"); - assertEquals(2, terms.getBuckets().size()); - assertEquals(1, terms.getBucketByKey("foo").getDocCount()); - assertEquals(1, terms.getBucketByKey("bar").getDocCount()); - - response = prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")).get(); - 
assertNoFailures(response); - terms = response.getAggregations().get("my_terms"); - assertEquals(1, terms.getBuckets().size()); - assertEquals(2, terms.getBucketByKey("foo").getDocCount()); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation(terms("my_terms").field("str").executionHint(mode.toString()).missing("bar")), + response -> { + assertNoFailures(response); + Terms terms = response.getAggregations().get("my_terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(1, terms.getBucketByKey("foo").getDocCount()); + assertEquals(1, terms.getBucketByKey("bar").getDocCount()); + } + ); + assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")), response -> { + Terms terms = response.getAggregations().get("my_terms"); + assertEquals(1, terms.getBuckets().size()); + assertEquals(2, terms.getBucketByKey("foo").getDocCount()); + }); } } public void testLongTerms() { - SearchResponse response = prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(4)).get(); - assertNoFailures(response); - Terms terms = response.getAggregations().get("my_terms"); - assertEquals(2, terms.getBuckets().size()); - assertEquals(1, terms.getBucketByKey("3").getDocCount()); - assertEquals(1, terms.getBucketByKey("4").getDocCount()); - - response = prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(3)).get(); - assertNoFailures(response); - terms = response.getAggregations().get("my_terms"); - assertEquals(1, terms.getBuckets().size()); - assertEquals(2, terms.getBucketByKey("3").getDocCount()); + assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(4)), response -> { + Terms terms = response.getAggregations().get("my_terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(1, terms.getBucketByKey("3").getDocCount()); + assertEquals(1, terms.getBucketByKey("4").getDocCount()); + }); + 
assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(3)), response -> { + assertNoFailures(response); + Terms terms2 = response.getAggregations().get("my_terms"); + assertEquals(1, terms2.getBuckets().size()); + assertEquals(2, terms2.getBucketByKey("3").getDocCount()); + }); } public void testDoubleTerms() { - SearchResponse response = prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(4.5)).get(); - assertNoFailures(response); - Terms terms = response.getAggregations().get("my_terms"); - assertEquals(2, terms.getBuckets().size()); - assertEquals(1, terms.getBucketByKey("4.5").getDocCount()); - assertEquals(1, terms.getBucketByKey("5.5").getDocCount()); - - response = prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(5.5)).get(); - assertNoFailures(response); - terms = response.getAggregations().get("my_terms"); - assertEquals(1, terms.getBuckets().size()); - assertEquals(2, terms.getBucketByKey("5.5").getDocCount()); + assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(4.5)), response -> { + Terms terms = response.getAggregations().get("my_terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(1, terms.getBucketByKey("4.5").getDocCount()); + assertEquals(1, terms.getBucketByKey("5.5").getDocCount()); + }); + + assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(5.5)), response -> { + Terms terms = response.getAggregations().get("my_terms"); + assertEquals(1, terms.getBuckets().size()); + assertEquals(2, terms.getBucketByKey("5.5").getDocCount()); + }); } public void testUnmappedHistogram() { - SearchResponse response = prepareSearch("idx").addAggregation( - histogram("my_histogram").field("non-existing_field").interval(5).missing(12) - ).get(); - assertNoFailures(response); - Histogram histogram = 
response.getAggregations().get("my_histogram"); - assertEquals(1, histogram.getBuckets().size()); - assertEquals(10d, histogram.getBuckets().get(0).getKey()); - assertEquals(2, histogram.getBuckets().get(0).getDocCount()); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation(histogram("my_histogram").field("non-existing_field").interval(5).missing(12)), + response -> { + Histogram histogram = response.getAggregations().get("my_histogram"); + assertEquals(1, histogram.getBuckets().size()); + assertEquals(10d, histogram.getBuckets().get(0).getKey()); + assertEquals(2, histogram.getBuckets().get(0).getDocCount()); + } + ); } public void testHistogram() { - SearchResponse response = prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(7)).get(); - assertNoFailures(response); - Histogram histogram = response.getAggregations().get("my_histogram"); - assertEquals(2, histogram.getBuckets().size()); - assertEquals(0d, histogram.getBuckets().get(0).getKey()); - assertEquals(1, histogram.getBuckets().get(0).getDocCount()); - assertEquals(5d, histogram.getBuckets().get(1).getKey()); - assertEquals(1, histogram.getBuckets().get(1).getDocCount()); - - response = prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(3)).get(); - assertNoFailures(response); - histogram = response.getAggregations().get("my_histogram"); - assertEquals(1, histogram.getBuckets().size()); - assertEquals(0d, histogram.getBuckets().get(0).getKey()); - assertEquals(2, histogram.getBuckets().get(0).getDocCount()); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(7)), + response -> { + Histogram histogram = response.getAggregations().get("my_histogram"); + assertEquals(2, histogram.getBuckets().size()); + assertEquals(0d, histogram.getBuckets().get(0).getKey()); + assertEquals(1, histogram.getBuckets().get(0).getDocCount()); + 
assertEquals(5d, histogram.getBuckets().get(1).getKey()); + assertEquals(1, histogram.getBuckets().get(1).getDocCount()); + } + ); + + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(3)), + response -> { + Histogram histogram = response.getAggregations().get("my_histogram"); + assertEquals(1, histogram.getBuckets().size()); + assertEquals(0d, histogram.getBuckets().get(0).getKey()); + assertEquals(2, histogram.getBuckets().get(0).getDocCount()); + } + ); } public void testDateHistogram() { - SearchResponse response = prepareSearch("idx").addAggregation( - dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2014-05-07") - ).get(); - assertNoFailures(response); - Histogram histogram = response.getAggregations().get("my_histogram"); - assertEquals(2, histogram.getBuckets().size()); - assertEquals("2014-01-01T00:00:00.000Z", histogram.getBuckets().get(0).getKeyAsString()); - assertEquals(1, histogram.getBuckets().get(0).getDocCount()); - assertEquals("2015-01-01T00:00:00.000Z", histogram.getBuckets().get(1).getKeyAsString()); - assertEquals(1, histogram.getBuckets().get(1).getDocCount()); - - response = prepareSearch("idx").addAggregation( - dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2015-05-07") - ).get(); - assertNoFailures(response); - histogram = response.getAggregations().get("my_histogram"); - assertEquals(1, histogram.getBuckets().size()); - assertEquals("2015-01-01T00:00:00.000Z", histogram.getBuckets().get(0).getKeyAsString()); - assertEquals(2, histogram.getBuckets().get(0).getDocCount()); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( + dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2014-05-07") + ), + response -> { + Histogram histogram = response.getAggregations().get("my_histogram"); + assertEquals(2, 
histogram.getBuckets().size()); + assertEquals("2014-01-01T00:00:00.000Z", histogram.getBuckets().get(0).getKeyAsString()); + assertEquals(1, histogram.getBuckets().get(0).getDocCount()); + assertEquals("2015-01-01T00:00:00.000Z", histogram.getBuckets().get(1).getKeyAsString()); + assertEquals(1, histogram.getBuckets().get(1).getDocCount()); + } + ); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( + dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2015-05-07") + ), + response -> { + Histogram histogram = response.getAggregations().get("my_histogram"); + assertEquals(1, histogram.getBuckets().size()); + assertEquals("2015-01-01T00:00:00.000Z", histogram.getBuckets().get(0).getKeyAsString()); + assertEquals(2, histogram.getBuckets().get(0).getDocCount()); + } + ); } public void testCardinality() { - SearchResponse response = prepareSearch("idx").addAggregation(cardinality("card").field("long").missing(2)).get(); - assertNoFailures(response); - Cardinality cardinality = response.getAggregations().get("card"); - assertEquals(2, cardinality.getValue()); + assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(cardinality("card").field("long").missing(2)), response -> { + Cardinality cardinality = response.getAggregations().get("card"); + assertEquals(2, cardinality.getValue()); + }); } public void testPercentiles() { - SearchResponse response = prepareSearch("idx").addAggregation(percentiles("percentiles").field("long").missing(1000)).get(); - assertNoFailures(response); - Percentiles percentiles = response.getAggregations().get("percentiles"); - assertEquals(1000, percentiles.percentile(100), 0); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation(percentiles("percentiles").field("long").missing(1000)), + response -> { + Percentiles percentiles = response.getAggregations().get("percentiles"); + assertEquals(1000, percentiles.percentile(100), 0); + } + ); } public void 
testStats() { - SearchResponse response = prepareSearch("idx").addAggregation(stats("stats").field("long").missing(5)).get(); - assertNoFailures(response); - Stats stats = response.getAggregations().get("stats"); - assertEquals(2, stats.getCount()); - assertEquals(4, stats.getAvg(), 0); + assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(stats("stats").field("long").missing(5)), response -> { + Stats stats = response.getAggregations().get("stats"); + assertEquals(2, stats.getCount()); + assertEquals(4, stats.getAvg(), 0); + }); } public void testUnmappedGeoBounds() { - SearchResponse response = prepareSearch("idx").addAggregation(geoBounds("bounds").field("non_existing_field").missing("2,1")).get(); - assertNoFailures(response); - GeoBounds bounds = response.getAggregations().get("bounds"); - assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.bottomRight().lon(), closeTo(1.0, 1E-5)); - assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5)); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation(geoBounds("bounds").field("non_existing_field").missing("2,1")), + response -> { + GeoBounds bounds = response.getAggregations().get("bounds"); + assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5)); + assertThat(bounds.bottomRight().lon(), closeTo(1.0, 1E-5)); + assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); + assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5)); + } + ); } public void testGeoBounds() { - SearchResponse response = prepareSearch("idx").addAggregation(geoBounds("bounds").field("location").missing("2,1")).get(); - assertNoFailures(response); - GeoBounds bounds = response.getAggregations().get("bounds"); - assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5)); - assertThat(bounds.bottomRight().lon(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lon(), closeTo(1.0, 
1E-5)); + assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(geoBounds("bounds").field("location").missing("2,1")), response -> { + GeoBounds bounds = response.getAggregations().get("bounds"); + assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5)); + assertThat(bounds.bottomRight().lon(), closeTo(2.0, 1E-5)); + assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); + assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5)); + }); } public void testGeoCentroid() { - SearchResponse response = prepareSearch("idx").addAggregation(geoCentroid("centroid").field("location").missing("2,1")).get(); - assertNoFailures(response); - GeoCentroid centroid = response.getAggregations().get("centroid"); - GeoPoint point = new GeoPoint(1.5, 1.5); - assertThat(point.getY(), closeTo(centroid.centroid().getY(), 1E-5)); - assertThat(point.getX(), closeTo(centroid.centroid().getX(), 1E-5)); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation(geoCentroid("centroid").field("location").missing("2,1")), + response -> { + GeoCentroid centroid = response.getAggregations().get("centroid"); + GeoPoint point = new GeoPoint(1.5, 1.5); + assertThat(point.getY(), closeTo(centroid.centroid().getY(), 1E-5)); + assertThat(point.getX(), closeTo(centroid.centroid().getX(), 1E-5)); + } + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java index 0e430c9618bc8..db06eb1b5de0b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.sort; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.geo.GeoPoint; @@ -41,6 +40,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.containsString; @@ -177,40 +177,43 @@ public void testSimpleSorts() throws Exception { refresh(); // STRING script - int size = 1 + random.nextInt(10); - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_value'].value", Collections.emptyMap()); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .setSize(size) - .addSort(new ScriptSortBuilder(script, ScriptSortType.STRING)) - .get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - SearchHit searchHit = searchResponse.getHits().getAt(i); - assertThat(searchHit.getId(), equalTo(Integer.toString(i))); - - String expected = new String(new char[] { (char) (97 + i), (char) (97 + i) }); - assertThat(searchHit.getSortValues()[0].toString(), equalTo(expected)); - } - - size = 1 + random.nextInt(10); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.DESC).get(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().getHits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - SearchHit searchHit = searchResponse.getHits().getAt(i); - assertThat(searchHit.getId(), equalTo(Integer.toString(9 - i))); + final int sizeFirstRequest = 1 + random.nextInt(10); + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()) + 
.setSize(sizeFirstRequest) + .addSort(new ScriptSortBuilder(script, ScriptSortType.STRING)), + searchResponse -> { + assertHitCount(searchResponse, 10); + assertThat(searchResponse.getHits().getHits().length, equalTo(sizeFirstRequest)); + for (int i = 0; i < sizeFirstRequest; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat(searchHit.getId(), equalTo(Integer.toString(i))); + + String expected = new String(new char[] { (char) (97 + i), (char) (97 + i) }); + assertThat(searchHit.getSortValues()[0].toString(), equalTo(expected)); + } + } + ); - String expected = new String(new char[] { (char) (97 + (9 - i)), (char) (97 + (9 - i)) }); - assertThat(searchHit.getSortValues()[0].toString(), equalTo(expected)); - } + final int sizeSecondRequest = 1 + random.nextInt(10); + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()).setSize(sizeSecondRequest).addSort("str_value", SortOrder.DESC), + searchResponse -> { + assertHitCount(searchResponse, 10); + assertThat(searchResponse.getHits().getHits().length, equalTo(sizeSecondRequest)); + for (int i = 0; i < sizeSecondRequest; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat(searchHit.getId(), equalTo(Integer.toString(9 - i))); + + String expected = new String(new char[] { (char) (97 + (9 - i)), (char) (97 + (9 - i)) }); + assertThat(searchHit.getSortValues()[0].toString(), equalTo(expected)); + } - assertThat(searchResponse.toString(), not(containsString("error"))); - assertNoFailures(searchResponse); + assertThat(searchResponse.toString(), not(containsString("error"))); + } + ); } public void testSortMinValueScript() throws IOException { @@ -260,64 +263,71 @@ public void testSortMinValueScript() throws IOException { indicesAdmin().prepareRefresh("test").get(); // test the long values - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addScriptField("min", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get min 
long", Collections.emptyMap())) - .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) - .setSize(10) - .get(); - - assertNoFailures(searchResponse); - - assertHitCount(searchResponse, 20L); - for (int i = 0; i < 10; i++) { - SearchHit searchHit = searchResponse.getHits().getAt(i); - assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").getValue(), equalTo((long) i)); - } + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()) + .addScriptField("min", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get min long", Collections.emptyMap())) + .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) + .setSize(10), + searchResponse -> { + assertHitCount(searchResponse, 20L); + for (int i = 0; i < 10; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").getValue(), equalTo((long) i)); + } + } + ); // test the double values - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addScriptField("min", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get min double", Collections.emptyMap())) - .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) - .setSize(10) - .get(); - - assertNoFailures(searchResponse); - - assertHitCount(searchResponse, 20L); - for (int i = 0; i < 10; i++) { - SearchHit searchHit = searchResponse.getHits().getAt(i); - assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").getValue(), equalTo((double) i)); - } + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()) + .addScriptField("min", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get min double", Collections.emptyMap())) + .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) + .setSize(10), + searchResponse -> { + assertHitCount(searchResponse, 20L); + for (int i = 0; i < 10; i++) { 
+ SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").getValue(), equalTo((double) i)); + } + } + ); // test the string values - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addScriptField("min", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get min string", Collections.emptyMap())) - .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) - .setSize(10) - .get(); - - assertNoFailures(searchResponse); - - assertHitCount(searchResponse, 20L); - for (int i = 0; i < 10; i++) { - SearchHit searchHit = searchResponse.getHits().getAt(i); - assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").getValue(), equalTo(i)); - } + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()) + .addScriptField("min", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get min string", Collections.emptyMap())) + .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) + .setSize(10), + searchResponse -> { + assertHitCount(searchResponse, 20L); + for (int i = 0; i < 10; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").getValue(), equalTo(i)); + } + } + ); // test the geopoint values - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addScriptField("min", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get min geopoint lon", Collections.emptyMap())) - .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) - .setSize(10) - .get(); - - assertNoFailures(searchResponse); - - assertHitCount(searchResponse, 20L); - for (int i = 0; i < 10; i++) { - SearchHit searchHit = searchResponse.getHits().getAt(i); - assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").getValue(), closeTo(i, GeoUtils.TOLERANCE)); - } + 
assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()) + .addScriptField( + "min", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get min geopoint lon", Collections.emptyMap()) + ) + .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) + .setSize(10), + searchResponse -> { + assertHitCount(searchResponse, 20L); + for (int i = 0; i < 10; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat( + "res: " + i + " id: " + searchHit.getId(), + searchHit.field("min").getValue(), + closeTo(i, GeoUtils.TOLERANCE) + ); + } + } + ); } public void testDocumentsWithNullValue() throws Exception { @@ -350,61 +360,60 @@ public void testDocumentsWithNullValue() throws Exception { Script scripField = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'].value", Collections.emptyMap()); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addScriptField("id", scripField) - .addSort("svalue", SortOrder.ASC) - .get(); - - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); - - searchResponse = prepareSearch().setQuery(matchAllQuery()) - .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'][0]", Collections.emptyMap())) - .addSort("svalue", SortOrder.ASC) - .get(); - - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), 
equalTo("2")); - - searchResponse = prepareSearch().setQuery(matchAllQuery()).addScriptField("id", scripField).addSort("svalue", SortOrder.DESC).get(); - - if (searchResponse.getFailedShards() > 0) { - logger.warn("Failed shards:"); - for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { - logger.warn("-> {}", shardSearchFailure); + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()).addScriptField("id", scripField).addSort("svalue", SortOrder.ASC), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); + assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); + assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); } - } - assertThat(searchResponse.getFailedShards(), equalTo(0)); + ); + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()) + .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'][0]", Collections.emptyMap())) + .addSort("svalue", SortOrder.ASC), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); + assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); + assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); + } + ); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); + assertNoFailuresAndResponse( + prepareSearch().setQuery(matchAllQuery()).addScriptField("id", scripField).addSort("svalue", SortOrder.DESC), + 
searchResponse -> { + if (searchResponse.getFailedShards() > 0) { + logger.warn("Failed shards:"); + for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { + logger.warn("-> {}", shardSearchFailure); + } + } + assertThat(searchResponse.getFailedShards(), equalTo(0)); - // a query with docs just with null values - searchResponse = prepareSearch().setQuery(termQuery("id", "2")) - .addScriptField("id", scripField) - .addSort("svalue", SortOrder.DESC) - .get(); - - if (searchResponse.getFailedShards() > 0) { - logger.warn("Failed shards:"); - for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { - logger.warn("-> {}", shardSearchFailure); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("3")); + assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("1")); + assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); } - } - assertThat(searchResponse.getFailedShards(), equalTo(0)); + ); + // a query with docs just with null values + assertNoFailuresAndResponse( + prepareSearch().setQuery(termQuery("id", "2")).addScriptField("id", scripField).addSort("svalue", SortOrder.DESC), + searchResponse -> { + if (searchResponse.getFailedShards() > 0) { + logger.warn("Failed shards:"); + for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { + logger.warn("-> {}", shardSearchFailure); + } + } + assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("2")); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("2")); + } + ); } public void test2920() throws IOException { diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/AbortedSnapshotIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/AbortedSnapshotIT.java index e3bd85440c535..bd14f913b10ef 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/AbortedSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/AbortedSnapshotIT.java @@ -78,7 +78,7 @@ public void run() { final var shardStatuses = snapshotShardsService.currentSnapshotShards(snapshot); assertEquals(1, shardStatuses.size()); - final var shardStatus = shardStatuses.get(new ShardId(index, 0)).asCopy(); + final var shardStatus = shardStatuses.get(new ShardId(index, 0)); logger.info("--> {}", shardStatus); if (i == 0) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index d68301a310722..ca522064e3d04 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -1066,7 +1066,6 @@ public void testEquivalentDeletesAreDeduplicated() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99355") public void testMasterFailoverOnFinalizationLoop() throws Exception { internalCluster().startMasterOnlyNodes(3); final String dataNode = internalCluster().startDataOnlyNode(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java index ee955da01f4af..b2494c5bd2b91 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java @@ -71,7 +71,7 
@@ public void testRetryPostingSnapshotStatusMessages() throws Exception { List stages = snapshotShardsService.currentSnapshotShards(snapshot) .values() .stream() - .map(status -> status.asCopy().getStage()) + .map(IndexShardSnapshotStatus.Copy::getStage) .toList(); assertThat(stages, hasSize(shards)); assertThat(stages, everyItem(equalTo(IndexShardSnapshotStatus.Stage.DONE))); diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 8e636a93e4f0b..65792ebcccc64 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -406,7 +406,8 @@ provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.features.FeaturesSupportedSpecification, - org.elasticsearch.health.HealthFeature; + org.elasticsearch.health.HealthFeature, + org.elasticsearch.rest.RestFeatures; uses org.elasticsearch.plugins.internal.SettingsExtension; uses RestExtension; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index d09be93772e78..0c7145730e447 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -157,6 +157,10 @@ static TransportVersion def(int id) { public static final TransportVersion CLUSTER_FEATURES_ADDED = def(8_526_00_0); public static final TransportVersion DSL_ERROR_STORE_INFORMATION_ENHANCED = def(8_527_00_0); public static final TransportVersion INVALID_BUCKET_PATH_EXCEPTION_INTRODUCED = def(8_528_00_0); + public static final TransportVersion KNN_AS_QUERY_ADDED = def(8_529_00_0); + public static final TransportVersion UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED = def(8_530_00_0); + public static final TransportVersion ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED = def(8_531_00_0); + public static final TransportVersion DEPRECATED_COMPONENT_TEMPLATES_ADDED = def(8_532_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 6ac451d5bc93b..a855b6b8ee7e3 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -282,6 +282,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards; import org.elasticsearch.health.GetHealthAction; import org.elasticsearch.health.RestGetHealthAction; @@ -857,7 +858,7 @@ private static ActionFilters setupActionFilters(List actionPlugins return new ActionFilters(Set.copyOf(finalFilters)); } - public void initRestHandlers(Supplier nodesInCluster) { + public void initRestHandlers(Supplier nodesInCluster, Predicate clusterSupportsFeature) { List catActions = new ArrayList<>(); Predicate catActionsFilter = restExtension.getCatActionsFilter(); Predicate restFilter = restExtension.getActionsFilter(); @@ -889,7 +890,7 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestClusterStateAction(settingsFilter, threadPool)); registerHandler.accept(new RestClusterHealthAction()); registerHandler.accept(new RestClusterUpdateSettingsAction()); - registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter, nodesInCluster)); + registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter, clusterSupportsFeature)); registerHandler.accept(new RestClusterRerouteAction(settingsFilter)); registerHandler.accept(new RestClusterSearchShardsAction()); registerHandler.accept(new RestPendingClusterTasksAction()); diff --git 
a/server/src/main/java/org/elasticsearch/action/NoShardAvailableActionException.java b/server/src/main/java/org/elasticsearch/action/NoShardAvailableActionException.java index bb4eb6c202b76..e018cf48fcefc 100644 --- a/server/src/main/java/org/elasticsearch/action/NoShardAvailableActionException.java +++ b/server/src/main/java/org/elasticsearch/action/NoShardAvailableActionException.java @@ -16,7 +16,7 @@ import java.io.IOException; import java.io.PrintWriter; -public class NoShardAvailableActionException extends ElasticsearchException { +public final class NoShardAvailableActionException extends ElasticsearchException { private static final StackTraceElement[] EMPTY_STACK_TRACE = new StackTraceElement[0]; @@ -28,22 +28,18 @@ public static NoShardAvailableActionException forOnShardFailureWrapper(String ms return new NoShardAvailableActionException(null, msg, null, true); } - @SuppressWarnings("this-escape") public NoShardAvailableActionException(ShardId shardId) { this(shardId, null, null, false); } - @SuppressWarnings("this-escape") public NoShardAvailableActionException(ShardId shardId, String msg) { this(shardId, msg, null, false); } - @SuppressWarnings("this-escape") public NoShardAvailableActionException(ShardId shardId, String msg, Throwable cause) { this(shardId, msg, cause, false); } - @SuppressWarnings("this-escape") private NoShardAvailableActionException(ShardId shardId, String msg, Throwable cause, boolean onShardFailureWrapper) { super(msg, cause); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/action/RoutingMissingException.java b/server/src/main/java/org/elasticsearch/action/RoutingMissingException.java index a90bc14f9ac8d..0999e7154b058 100644 --- a/server/src/main/java/org/elasticsearch/action/RoutingMissingException.java +++ b/server/src/main/java/org/elasticsearch/action/RoutingMissingException.java @@ -18,11 +18,10 @@ import java.io.IOException; import java.util.Objects; -public class RoutingMissingException extends 
ElasticsearchException { +public final class RoutingMissingException extends ElasticsearchException { private final String id; - @SuppressWarnings("this-escape") public RoutingMissingException(String index, String id) { super("routing is required for [" + index + "]/[" + id + "]"); Objects.requireNonNull(index, "index must not be null"); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java index b585e891a5903..fc11790079521 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java @@ -95,7 +95,7 @@ protected void masterOperation( listener.onResponse( new DesiredBalanceResponse( desiredBalanceShardsAllocator.getStats(), - ClusterBalanceStats.createFrom(state, clusterInfo, writeLoadForecaster), + ClusterBalanceStats.createFrom(state, latestDesiredBalance, clusterInfo, writeLoadForecaster), createRoutingTable(state, latestDesiredBalance), clusterInfo ) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java index 0cf0baa75a8de..ebf01feaaa891 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java @@ -20,7 +20,7 @@ /** * A request to get node (cluster) level information. 
*/ -public class NodesInfoRequest extends BaseNodesRequest { +public final class NodesInfoRequest extends BaseNodesRequest { private final NodesInfoMetrics nodesInfoMetrics; @@ -39,7 +39,6 @@ public NodesInfoRequest(StreamInput in) throws IOException { * Get information from nodes based on the nodes ids specified. If none are passed, information * for all nodes will be returned. */ - @SuppressWarnings("this-escape") public NodesInfoRequest(String... nodesIds) { super(nodesIds); nodesInfoMetrics = new NodesInfoMetrics(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index 39205715dca8f..6f6253491c580 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -20,7 +20,9 @@ import java.io.IOException; import java.util.Objects; -public class ClusterSearchShardsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { +public final class ClusterSearchShardsRequest extends MasterNodeReadRequest + implements + IndicesRequest.Replaceable { private String[] indices = Strings.EMPTY_ARRAY; @Nullable @@ -31,7 +33,6 @@ public class ClusterSearchShardsRequest extends MasterNodeReadRequest shardsStatus = snapshotShardsService.currentSnapshotShards(snapshot); + final var shardsStatus = snapshotShardsService.currentSnapshotShards(snapshot); if (shardsStatus == null) { continue; } Map shardMapBuilder = new HashMap<>(); - for (Map.Entry shardEntry : shardsStatus.entrySet()) { + for (final var shardEntry : shardsStatus.entrySet()) { final ShardId shardId = shardEntry.getKey(); - final IndexShardSnapshotStatus.Copy lastSnapshotStatus = shardEntry.getValue().asCopy(); + final IndexShardSnapshotStatus.Copy lastSnapshotStatus = 
shardEntry.getValue(); final IndexShardSnapshotStatus.Stage stage = lastSnapshotStatus.getStage(); String shardNodeId = null; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 2a6f0325be1d2..f8b9a9571ddd2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -243,7 +243,6 @@ private void buildResponse( entry.indices().get(shardId.getIndexName()), shardId ) - .asCopy() ); } else { shardStatus = new SnapshotIndexShardStatus(entry.shardId(shardEntry.getKey()), stage); @@ -322,7 +321,7 @@ private void loadRepositoryData( repositoriesService.repository(repositoryName) .getSnapshotInfo(new GetSnapshotInfoContext(snapshotIdsToLoad, true, task::isCancelled, (context, snapshotInfo) -> { List shardStatusBuilder = new ArrayList<>(); - final Map shardStatuses; + final Map shardStatuses; try { shardStatuses = snapshotShards(repositoryName, repositoryData, task, snapshotInfo); } catch (Exception e) { @@ -330,8 +329,8 @@ private void loadRepositoryData( context.onFailure(e); return; } - for (Map.Entry shardStatus : shardStatuses.entrySet()) { - IndexShardSnapshotStatus.Copy lastSnapshotStatus = shardStatus.getValue().asCopy(); + for (final var shardStatus : shardStatuses.entrySet()) { + IndexShardSnapshotStatus.Copy lastSnapshotStatus = shardStatus.getValue(); shardStatusBuilder.add(new SnapshotIndexShardStatus(shardStatus.getKey(), lastSnapshotStatus)); } final SnapshotsInProgress.State state = switch (snapshotInfo.state()) { @@ -374,14 +373,14 @@ private void loadRepositoryData( * @param snapshotInfo snapshot info * @return map of shard id to snapshot status */ - private Map snapshotShards( + 
private Map snapshotShards( final String repositoryName, final RepositoryData repositoryData, final CancellableTask task, final SnapshotInfo snapshotInfo ) throws IOException { final Repository repository = repositoriesService.repository(repositoryName); - final Map shardStatus = new HashMap<>(); + final Map shardStatus = new HashMap<>(); for (String index : snapshotInfo.indices()) { IndexId indexId = repositoryData.resolveIndexId(index); task.ensureNotCancelled(); @@ -394,7 +393,7 @@ private Map snapshotShards( if (shardFailure != null) { shardStatus.put(shardId, IndexShardSnapshotStatus.newFailed(shardFailure.reason())); } else { - final IndexShardSnapshotStatus shardSnapshotStatus; + final IndexShardSnapshotStatus.Copy shardSnapshotStatus; if (snapshotInfo.state() == SnapshotState.FAILED) { // If the snapshot failed, but the shard's snapshot does // not have an exception, it means that partial snapshots diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index 531dc6dc5eff3..8a674292b3cc5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -59,7 +59,7 @@ public static class Fields { * A request to analyze a text associated with a specific index. Allow to provide * the actual analyzer name to perform the analysis with. 
*/ - public static class Request extends SingleShardRequest { + public static final class Request extends SingleShardRequest { private String[] text; private String analyzer; @@ -91,7 +91,6 @@ public Request() {} * * @param index The text to analyze */ - @SuppressWarnings("this-escape") public Request(String index) { this.index(index); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java index d2df8e20f99ea..c8ecbf273c93c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java @@ -43,7 +43,7 @@ public class TransportVerifyShardBeforeCloseAction extends TransportReplicationA public static final String NAME = CloseIndexAction.NAME + "[s]"; public static final ActionType TYPE = new ActionType<>(NAME, ReplicationResponse::new); - protected Logger logger = LogManager.getLogger(getClass()); + private static final Logger logger = LogManager.getLogger(TransportVerifyShardBeforeCloseAction.class); @Inject public TransportVerifyShardBeforeCloseAction( @@ -163,7 +163,7 @@ public void markShardCopyAsStaleIfNeeded( } } - public static class ShardRequest extends ReplicationRequest { + public static final class ShardRequest extends ReplicationRequest { private final ClusterBlock clusterBlock; @@ -175,7 +175,6 @@ public static class ShardRequest extends ReplicationRequest { phase1 = in.readBoolean(); } - @SuppressWarnings("this-escape") public ShardRequest(final ShardId shardId, final ClusterBlock clusterBlock, final boolean phase1, final TaskId parentTaskId) { super(shardId); this.clusterBlock = Objects.requireNonNull(clusterBlock); diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java index aec5718b31a84..7daf04f41a9fb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java @@ -7,8 +7,6 @@ */ package org.elasticsearch.action.admin.indices.readonly; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; @@ -48,7 +46,6 @@ public class TransportVerifyShardIndexBlockAction extends TransportReplicationAc public static final String NAME = AddIndexBlockAction.NAME + "[s]"; public static final ActionType TYPE = new ActionType<>(NAME, ReplicationResponse::new); - protected Logger logger = LogManager.getLogger(getClass()); @Inject public TransportVerifyShardIndexBlockAction( @@ -157,7 +154,7 @@ public void markShardCopyAsStaleIfNeeded( } } - public static class ShardRequest extends ReplicationRequest { + public static final class ShardRequest extends ReplicationRequest { private final ClusterBlock clusterBlock; @@ -166,7 +163,6 @@ public static class ShardRequest extends ReplicationRequest { clusterBlock = new ClusterBlock(in); } - @SuppressWarnings("this-escape") public ShardRequest(final ShardId shardId, final ClusterBlock clusterBlock, final TaskId parentTaskId) { super(shardId); this.clusterBlock = Objects.requireNonNull(clusterBlock); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java index 30197d102dc47..5686deb6b804a 
100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java @@ -8,8 +8,6 @@ package org.elasticsearch.action.admin.indices.shrink; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; @@ -44,7 +42,6 @@ * Main class to initiate resizing (shrink / split) an index into a new index */ public class TransportResizeAction extends TransportMasterNodeAction { - private static final Logger logger = LogManager.getLogger(TransportResizeAction.class); private final MetadataCreateIndexService createIndexService; private final Client client; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java index bccc7a8f7e243..391ac532a0c3a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java @@ -24,7 +24,7 @@ * The SHARD_LEVEL flags are for stat fields that can be calculated at the shard level and then may be later aggregated at the index level * along with index-level flag stat fields (e.g., Mappings). */ -public class CommonStatsFlags implements Writeable, Cloneable { +public final class CommonStatsFlags implements Writeable, Cloneable { public static final CommonStatsFlags ALL = new CommonStatsFlags().all(); public static final CommonStatsFlags SHARD_LEVEL = new CommonStatsFlags().all().set(Flag.Mappings, false); @@ -40,7 +40,6 @@ public class CommonStatsFlags implements Writeable, Cloneable { /** * @param flags flags to set. 
If no flags are supplied, default flags will be set. */ - @SuppressWarnings("this-escape") public CommonStatsFlags(Flag... flags) { if (flags.length > 0) { clear(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComponentTemplateAction.java index 4e1776a49d21c..335c0781fb884 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComponentTemplateAction.java @@ -78,7 +78,12 @@ public static ComponentTemplate normalizeComponentTemplate( Settings settings = builder.build(); indexScopedSettings.validate(settings, true); template = new Template(settings, template.mappings(), template.aliases(), template.lifecycle()); - componentTemplate = new ComponentTemplate(template, componentTemplate.version(), componentTemplate.metadata()); + componentTemplate = new ComponentTemplate( + template, + componentTemplate.version(), + componentTemplate.metadata(), + componentTemplate.deprecated() + ); } return componentTemplate; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 64505d76e26b8..0505f41b27599 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -29,7 +29,7 @@ *

* The request requires the query to be set using {@link #query(QueryBuilder)} */ -public class ValidateQueryRequest extends BroadcastRequest implements ToXContentObject { +public final class ValidateQueryRequest extends BroadcastRequest implements ToXContentObject { public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.fromOptions(false, false, true, false); @@ -65,7 +65,6 @@ public ValidateQueryRequest(StreamInput in) throws IOException { * Constructs a new validate request against the provided indices. No indices provided means it will * run against all indices. */ - @SuppressWarnings("this-escape") public ValidateQueryRequest(String... indices) { super(indices); indicesOptions(DEFAULT_INDICES_OPTIONS); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java index f071ffb22fd5d..402b41761e3d1 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java @@ -21,7 +21,7 @@ * Implements the low-level details of bulk request handling */ public final class BulkRequestHandler { - private final Logger logger; + private static final Logger logger = LogManager.getLogger(BulkRequestHandler.class); private final BiConsumer> consumer; private final BulkProcessor.Listener listener; private final Semaphore semaphore; @@ -36,7 +36,6 @@ public final class BulkRequestHandler { int concurrentRequests ) { assert concurrentRequests >= 0; - this.logger = LogManager.getLogger(getClass()); this.consumer = consumer; this.listener = listener; this.concurrentRequests = concurrentRequests; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java index f3473f274bf38..bd929b9a2204e 100644 --- 
a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java @@ -24,7 +24,10 @@ import java.io.IOException; import java.util.Set; -public class BulkShardRequest extends ReplicatedWriteRequest implements Accountable, RawIndexingDataTransportRequest { +public final class BulkShardRequest extends ReplicatedWriteRequest + implements + Accountable, + RawIndexingDataTransportRequest { private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(BulkShardRequest.class); @@ -35,7 +38,6 @@ public BulkShardRequest(StreamInput in) throws IOException { items = in.readArray(i -> i.readOptionalWriteable(inpt -> new BulkItemRequest(shardId, inpt)), BulkItemRequest[]::new); } - @SuppressWarnings("this-escape") public BulkShardRequest(ShardId shardId, RefreshPolicy refreshPolicy, BulkItemRequest[] items) { super(shardId); this.items = items; diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java index 68a4e0d0b04c9..4ecb092f34d4b 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.action.datastreams; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; @@ -66,20 +65,14 @@ public ActionRequestValidationException validate() { public Request(StreamInput in) throws IOException { super(in); this.name = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - this.startTime = in.readVLong(); - } else { - this.startTime = System.currentTimeMillis(); - } + this.startTime = in.readVLong(); 
} @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(name); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - out.writeVLong(startTime); - } + out.writeVLong(startTime); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java index 5d015283aa7cd..2dde9e53f5e7f 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java @@ -46,7 +46,8 @@ static void executeDocument( pipeline.getDescription(), pipeline.getVersion(), pipeline.getMetadata(), - verbosePipelineProcessor + verbosePipelineProcessor, + pipeline.getDeprecated() ); ingestDocument.executePipeline(verbosePipeline, (result, e) -> { handler.accept(new SimulateDocumentVerboseResult(processorResultList), e); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 487a5c5653998..b6a9179b1e956 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -455,7 +455,7 @@ public String toString() { * and how many of them were skipped and further details in a Map of Cluster objects * (when doing a cross-cluster search). 
*/ - public static class Clusters implements ToXContentFragment, Writeable { + public static final class Clusters implements ToXContentFragment, Writeable { public static final Clusters EMPTY = new Clusters(0, 0, 0); @@ -538,7 +538,6 @@ public Clusters(int total, int successful, int skipped) { this.clusterInfo = Collections.emptyMap(); // will never be used if created from this constructor } - @SuppressWarnings("this-escape") public Clusters(StreamInput in) throws IOException { this.total = in.readVInt(); int successfulTemp = in.readVInt(); diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationFailedException.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationFailedException.java index 9cfe0a1f1b992..b1594bf5ba935 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationFailedException.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationFailedException.java @@ -20,7 +20,7 @@ * * */ -public class BroadcastShardOperationFailedException extends ElasticsearchException implements ElasticsearchWrapperException { +public final class BroadcastShardOperationFailedException extends ElasticsearchException implements ElasticsearchWrapperException { public BroadcastShardOperationFailedException(ShardId shardId, String msg) { this(shardId, msg, null); @@ -30,7 +30,6 @@ public BroadcastShardOperationFailedException(ShardId shardId, Throwable cause) this(shardId, "", cause); } - @SuppressWarnings("this-escape") public BroadcastShardOperationFailedException(ShardId shardId, String msg, Throwable cause) { super(msg, cause); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java index 1f347ec2b8cac..1604ff81603ab 100644 --- 
a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java @@ -661,13 +661,11 @@ public interface ReplicaResponse { } - public static class RetryOnPrimaryException extends ElasticsearchException { - @SuppressWarnings("this-escape") + public static final class RetryOnPrimaryException extends ElasticsearchException { public RetryOnPrimaryException(ShardId shardId, String msg) { this(shardId, msg, null); } - @SuppressWarnings("this-escape") RetryOnPrimaryException(ShardId shardId, String msg, Throwable cause) { super(msg, cause); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 411f23a0fc0ad..0abe7ad678dc5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -605,9 +605,8 @@ protected Releasable checkReplicaLimits(final ReplicaRequest request) { return () -> {}; } - public static class RetryOnReplicaException extends ElasticsearchException { + public static final class RetryOnReplicaException extends ElasticsearchException { - @SuppressWarnings("this-escape") public RetryOnReplicaException(ShardId shardId, String msg) { super(msg); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index 56edc5117a28b..650b9db7f3d69 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -50,7 +50,7 @@ */ // It's not possible to suppress 
teh warning at #realtime(boolean) at a method-level. @SuppressWarnings("unchecked") -public class TermVectorsRequest extends SingleShardRequest implements RealtimeRequest { +public final class TermVectorsRequest extends SingleShardRequest implements RealtimeRequest { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TermVectorsRequest.class); private static final ParseField INDEX = new ParseField("_index"); @@ -79,7 +79,7 @@ public class TermVectorsRequest extends SingleShardRequest i private long version = Versions.MATCH_ANY; - protected String preference; + private String preference; private static final AtomicInteger randomInt = new AtomicInteger(0); @@ -204,7 +204,6 @@ public TermVectorsRequest(TermVectorsRequest other) { this.filterSettings = other.filterSettings(); } - @SuppressWarnings("this-escape") public TermVectorsRequest(MultiGetRequest.Item item) { super(item.index()); this.id = item.id(); diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 8bcb6a28fb50a..0f84ecab5f8b2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -841,9 +841,8 @@ public String toString() { } } - public static class NoLongerPrimaryShardException extends ElasticsearchException { + public static final class NoLongerPrimaryShardException extends ElasticsearchException { - @SuppressWarnings("this-escape") public NoLongerPrimaryShardException(ShardId shardId, String msg) { super(msg); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java index 86e5d6739fcb7..feb0543aad625 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java @@ -61,7 +61,7 @@ * considering a follower to be faulty, to allow for a brief network partition or a long GC cycle to occur without triggering the removal of * a node and the consequent shard reallocation. */ -public class FollowersChecker { +public final class FollowersChecker { private static final Logger logger = LogManager.getLogger(FollowersChecker.class); @@ -105,7 +105,6 @@ public class FollowersChecker { private final NodeHealthService nodeHealthService; private volatile FastResponseState fastResponseState; - @SuppressWarnings("this-escape") public FollowersChecker( Settings settings, TransportService transportService, diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/PreVoteCollector.java b/server/src/main/java/org/elasticsearch/cluster/coordination/PreVoteCollector.java index a308a8e0e6c75..b17eb731cfc6a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/PreVoteCollector.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/PreVoteCollector.java @@ -21,7 +21,7 @@ import java.util.function.LongConsumer; public abstract class PreVoteCollector { - private final Logger logger = LogManager.getLogger(PreVoteCollector.class); + private static final Logger logger = LogManager.getLogger(PreVoteCollector.class); // Tuple for simple atomic updates. null until the first call to `update()`. 
protected volatile Tuple state; // DiscoveryNode component is null if there is currently no known diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java index 090f647ee349a..dac5005e0e043 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java @@ -8,6 +8,7 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.SimpleDiffable; @@ -35,18 +36,20 @@ public class ComponentTemplate implements SimpleDiffable, ToX private static final ParseField TEMPLATE = new ParseField("template"); private static final ParseField VERSION = new ParseField("version"); private static final ParseField METADATA = new ParseField("_meta"); + private static final ParseField DEPRECATED = new ParseField("deprecated"); @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "component_template", false, - a -> new ComponentTemplate((Template) a[0], (Long) a[1], (Map) a[2]) + a -> new ComponentTemplate((Template) a[0], (Long) a[1], (Map) a[2], (Boolean) a[3]) ); static { PARSER.declareObject(ConstructingObjectParser.constructorArg(), Template.PARSER, TEMPLATE); PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), VERSION); PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.map(), METADATA); + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), DEPRECATED); } private final Template template; @@ -54,6 +57,8 @@ public class ComponentTemplate implements SimpleDiffable, ToX private final Long version; @Nullable private final Map metadata; + @Nullable + 
private final Boolean deprecated; static Diff readComponentTemplateDiffFrom(StreamInput in) throws IOException { return SimpleDiffable.readDiffFrom(ComponentTemplate::new, in); @@ -64,9 +69,19 @@ public static ComponentTemplate parse(XContentParser parser) { } public ComponentTemplate(Template template, @Nullable Long version, @Nullable Map metadata) { + this(template, version, metadata, null); + } + + public ComponentTemplate( + Template template, + @Nullable Long version, + @Nullable Map metadata, + @Nullable Boolean deprecated + ) { this.template = template; this.version = version; this.metadata = metadata; + this.deprecated = deprecated; } public ComponentTemplate(StreamInput in) throws IOException { @@ -77,6 +92,11 @@ public ComponentTemplate(StreamInput in) throws IOException { } else { this.metadata = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.DEPRECATED_COMPONENT_TEMPLATES_ADDED)) { + this.deprecated = in.readOptionalBoolean(); + } else { + deprecated = null; + } } public Template template() { @@ -93,6 +113,14 @@ public Map metadata() { return metadata; } + public Boolean deprecated() { + return deprecated; + } + + public boolean isDeprecated() { + return Boolean.TRUE.equals(deprecated); + } + @Override public void writeTo(StreamOutput out) throws IOException { this.template.writeTo(out); @@ -103,11 +131,14 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(true); out.writeGenericMap(this.metadata); } + if (out.getTransportVersion().onOrAfter(TransportVersions.DEPRECATED_COMPONENT_TEMPLATES_ADDED)) { + out.writeOptionalBoolean(this.deprecated); + } } @Override public int hashCode() { - return Objects.hash(template, version, metadata); + return Objects.hash(template, version, metadata, deprecated); } @Override @@ -121,7 +152,8 @@ public boolean equals(Object obj) { ComponentTemplate other = (ComponentTemplate) obj; return Objects.equals(template, other.template) && Objects.equals(version, other.version) - 
&& Objects.equals(metadata, other.metadata); + && Objects.equals(metadata, other.metadata) + && Objects.equals(deprecated, other.deprecated); } @Override @@ -148,6 +180,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla if (this.metadata != null) { builder.field(METADATA.getPreferredName(), this.metadata); } + if (this.deprecated != null) { + builder.field(DEPRECATED.getPreferredName(), this.deprecated); + } builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index bd745e7ff4ea6..47ab1d099c037 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -46,6 +46,7 @@ public class ComposableIndexTemplate implements SimpleDiffable PARSER = new ConstructingObjectParser<>( @@ -60,7 +61,8 @@ public class ComposableIndexTemplate implements SimpleDiffable) a[5], (DataStreamTemplate) a[6], (Boolean) a[7], - (List) a[8] + (List) a[8], + (Boolean) a[9] ) ); @@ -74,6 +76,7 @@ public class ComposableIndexTemplate implements SimpleDiffable indexPatterns; @@ -93,6 +96,8 @@ public class ComposableIndexTemplate implements SimpleDiffable ignoreMissingComponentTemplates; + @Nullable + private final Boolean deprecated; static Diff readITV2DiffFrom(StreamInput in) throws IOException { return SimpleDiffable.readDiffFrom(ComposableIndexTemplate::new, in); @@ -135,10 +140,10 @@ public ComposableIndexTemplate( @Nullable DataStreamTemplate dataStreamTemplate, @Nullable Boolean allowAutoCreate ) { - this(indexPatterns, template, componentTemplates, priority, version, metadata, dataStreamTemplate, null, null); + this(indexPatterns, template, componentTemplates, priority, version, metadata, dataStreamTemplate, allowAutoCreate, null); } - public 
ComposableIndexTemplate( + ComposableIndexTemplate( List indexPatterns, @Nullable Template template, @Nullable List componentTemplates, @@ -148,6 +153,32 @@ public ComposableIndexTemplate( @Nullable DataStreamTemplate dataStreamTemplate, @Nullable Boolean allowAutoCreate, @Nullable List ignoreMissingComponentTemplates + ) { + this( + indexPatterns, + template, + componentTemplates, + priority, + version, + metadata, + dataStreamTemplate, + allowAutoCreate, + ignoreMissingComponentTemplates, + null + ); + } + + public ComposableIndexTemplate( + List indexPatterns, + @Nullable Template template, + @Nullable List componentTemplates, + @Nullable Long priority, + @Nullable Long version, + @Nullable Map metadata, + @Nullable DataStreamTemplate dataStreamTemplate, + @Nullable Boolean allowAutoCreate, + @Nullable List ignoreMissingComponentTemplates, + @Nullable Boolean deprecated ) { this.indexPatterns = indexPatterns; this.template = template; @@ -158,6 +189,7 @@ public ComposableIndexTemplate( this.dataStreamTemplate = dataStreamTemplate; this.allowAutoCreate = allowAutoCreate; this.ignoreMissingComponentTemplates = ignoreMissingComponentTemplates; + this.deprecated = deprecated; } public ComposableIndexTemplate(StreamInput in) throws IOException { @@ -178,6 +210,11 @@ public ComposableIndexTemplate(StreamInput in) throws IOException { } else { this.ignoreMissingComponentTemplates = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.DEPRECATED_COMPONENT_TEMPLATES_ADDED)) { + this.deprecated = in.readOptionalBoolean(); + } else { + this.deprecated = null; + } } public List indexPatterns() { @@ -250,6 +287,14 @@ public List getIgnoreMissingComponentTemplates() { return ignoreMissingComponentTemplates; } + public Boolean deprecated() { + return deprecated; + } + + public boolean isDeprecated() { + return Boolean.TRUE.equals(deprecated); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(this.indexPatterns); 
@@ -268,6 +313,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) { out.writeOptionalStringCollection(ignoreMissingComponentTemplates); } + if (out.getTransportVersion().onOrAfter(TransportVersions.DEPRECATED_COMPONENT_TEMPLATES_ADDED)) { + out.writeOptionalBoolean(deprecated); + } } @Override @@ -307,6 +355,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla if (this.ignoreMissingComponentTemplates != null) { builder.stringListField(IGNORE_MISSING_COMPONENT_TEMPLATES.getPreferredName(), ignoreMissingComponentTemplates); } + if (this.deprecated != null) { + builder.field(DEPRECATED.getPreferredName(), deprecated); + } builder.endObject(); return builder; } @@ -322,7 +373,8 @@ public int hashCode() { this.metadata, this.dataStreamTemplate, this.allowAutoCreate, - this.ignoreMissingComponentTemplates + this.ignoreMissingComponentTemplates, + this.deprecated ); } @@ -343,7 +395,8 @@ && componentTemplatesEquals(this.componentTemplates, other.componentTemplates) && Objects.equals(this.metadata, other.metadata) && Objects.equals(this.dataStreamTemplate, other.dataStreamTemplate) && Objects.equals(this.allowAutoCreate, other.allowAutoCreate) - && Objects.equals(this.ignoreMissingComponentTemplates, other.ignoreMissingComponentTemplates); + && Objects.equals(this.ignoreMissingComponentTemplates, other.ignoreMissingComponentTemplates) + && Objects.equals(deprecated, other.deprecated); } static boolean componentTemplatesEquals(List c1, List c2) { @@ -480,6 +533,7 @@ public static class Builder { private DataStreamTemplate dataStreamTemplate; private Boolean allowAutoCreate; private List ignoreMissingComponentTemplates; + private Boolean deprecated; public Builder() {} @@ -528,6 +582,11 @@ public Builder ignoreMissingComponentTemplates(List ignoreMissingCompone return this; } + public Builder deprecated(@Nullable Boolean deprecated) { + this.deprecated = 
deprecated; + return this; + } + public ComposableIndexTemplate build() { return new ComposableIndexTemplate( this.indexPatterns, @@ -538,7 +597,8 @@ public ComposableIndexTemplate build() { this.metadata, this.dataStreamTemplate, this.allowAutoCreate, - this.ignoreMissingComponentTemplates + this.ignoreMissingComponentTemplates, + this.deprecated ); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java index 7a40d7fd774d1..35b7d957bf076 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java @@ -224,7 +224,7 @@ public String toString() { } } - public static class Builder { + public static final class Builder { private static final Set VALID_FIELDS = Set.of("order", "mappings", "settings", "index_patterns", "aliases", "version"); @@ -248,7 +248,6 @@ public Builder(String name) { aliases = new HashMap<>(); } - @SuppressWarnings("this-escape") public Builder(IndexTemplateMetadata indexTemplateMetadata) { this.name = indexTemplateMetadata.name(); order(indexTemplateMetadata.order()); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index cf63602729bb4..e0dc1728eab6a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -28,6 +28,8 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import 
org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -50,6 +52,8 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.ingest.IngestMetadata; +import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.io.IOException; @@ -122,6 +126,7 @@ public class MetadataIndexTemplateService { } private static final Logger logger = LogManager.getLogger(MetadataIndexTemplateService.class); + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(MetadataIndexTemplateService.class); private final ClusterService clusterService; private final MasterServiceTaskQueue taskQueue; @@ -304,7 +309,12 @@ public ClusterState addComponentTemplate( template.template().aliases(), template.template().lifecycle() ); - final ComponentTemplate finalComponentTemplate = new ComponentTemplate(finalTemplate, template.version(), template.metadata()); + final ComponentTemplate finalComponentTemplate = new ComponentTemplate( + finalTemplate, + template.version(), + template.metadata(), + template.deprecated() + ); if (finalComponentTemplate.equals(existing)) { return currentState; @@ -614,7 +624,8 @@ public ClusterState addIndexTemplateV2( template.metadata(), template.getDataStreamTemplate(), template.getAllowAutoCreate(), - template.getIgnoreMissingComponentTemplates() + template.getIgnoreMissingComponentTemplates(), + template.deprecated() ); } @@ -716,13 +727,21 @@ private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexT indexTemplate.metadata(), indexTemplate.getDataStreamTemplate(), indexTemplate.getAllowAutoCreate(), - indexTemplate.getIgnoreMissingComponentTemplates() + indexTemplate.getIgnoreMissingComponentTemplates(), + 
indexTemplate.deprecated() ); validate(name, templateToValidate); validateDataStreamsStillReferenced(currentState, name, templateToValidate); validateLifecycleIsOnlyAppliedOnDataStreams(currentState.metadata(), name, templateToValidate); + if (templateToValidate.isDeprecated() == false) { + validateUseOfDeprecatedComponentTemplates(name, templateToValidate, currentState.metadata().componentTemplates()); + validateUseOfDeprecatedIngestPipelines(name, currentState.metadata().custom(IngestMetadata.TYPE), combinedSettings); + // TODO come up with a plan how to validate usage of deprecated ILM policies + // we don't have access to the core/main plugin here so we can't use the IndexLifecycleMetadata type + } + // Finally, right before adding the template, we need to ensure that the composite settings, // mappings, and aliases are valid after it's been composed with the component templates try { @@ -739,6 +758,50 @@ private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexT } } + private void validateUseOfDeprecatedComponentTemplates( + String name, + ComposableIndexTemplate template, + Map componentTemplates + ) { + template.composedOf() + .stream() + .map(ct -> Tuple.tuple(ct, componentTemplates.get(ct))) + .filter(ct -> Objects.nonNull(ct.v2())) + .filter(ct -> ct.v2().isDeprecated()) + .forEach( + ct -> deprecationLogger.warn( + DeprecationCategory.TEMPLATES, + "use_of_deprecated_component_template", + "index template [{}] uses deprecated component template [{}]", + name, + ct.v1() + ) + ); + } + + private void validateUseOfDeprecatedIngestPipelines(String name, IngestMetadata ingestMetadata, Settings combinedSettings) { + Map pipelines = Optional.ofNullable(ingestMetadata) + .map(IngestMetadata::getPipelines) + .orElse(Map.of()); + emitWarningIfPipelineIsDeprecated(name, pipelines, combinedSettings.get("index.default_pipeline")); + emitWarningIfPipelineIsDeprecated(name, pipelines, combinedSettings.get("index.final_pipeline")); + } + + private 
void emitWarningIfPipelineIsDeprecated(String name, Map pipelines, String pipelineName) { + Optional.ofNullable(pipelineName) + .map(pipelines::get) + .filter(p -> Boolean.TRUE.equals(p.getConfigAsMap().get("deprecated"))) + .ifPresent( + p -> deprecationLogger.warn( + DeprecationCategory.TEMPLATES, + "use_of_deprecated_ingest_pipeline", + "index template [{}] uses deprecated ingest pipeline [{}]", + name, + p.getId() + ) + ); + } + private static void validateLifecycleIsOnlyAppliedOnDataStreams( Metadata metadata, String indexTemplateName, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java index 0c5f547d1cb10..d17f3a297e805 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java @@ -60,6 +60,7 @@ import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; +import java.util.stream.StreamSupport; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toMap; @@ -119,12 +120,38 @@ public String name() { return NAME; } + /** + * Creates a new {@link ShardAllocationStatus} that will be used to track + * primary and replica availability, providing the color, diagnosis, and + * messages about the available or unavailable shards in the cluster. + * @param metadata Metadata for the cluster + * @return A new ShardAllocationStatus that has not yet been filled. 
+ */ + ShardAllocationStatus createNewStatus(Metadata metadata) { + return new ShardAllocationStatus(metadata); + } + @Override public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResourcesCount, HealthInfo healthInfo) { var state = clusterService.state(); var shutdown = state.getMetadata().custom(NodesShutdownMetadata.TYPE, NodesShutdownMetadata.EMPTY); - var status = new ShardAllocationStatus(state.getMetadata()); + var status = createNewStatus(state.getMetadata()); + updateShardAllocationStatus(status, state, shutdown, verbose); + return createIndicator( + status.getStatus(), + status.getSymptom(), + status.getDetails(verbose), + status.getImpacts(), + status.getDiagnosis(verbose, maxAffectedResourcesCount) + ); + } + static void updateShardAllocationStatus( + ShardAllocationStatus status, + ClusterState state, + NodesShutdownMetadata shutdown, + boolean verbose + ) { for (IndexRoutingTable indexShardRouting : state.routingTable()) { for (int i = 0; i < indexShardRouting.size(); i++) { IndexShardRoutingTable shardRouting = indexShardRouting.shard(i); @@ -136,13 +163,6 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources } status.updateSearchableSnapshotsOfAvailableIndices(); - return createIndicator( - status.getStatus(), - status.getSymptom(), - status.getDetails(verbose), - status.getImpacts(), - status.getDiagnosis(verbose, maxAffectedResourcesCount) - ); } // Impact IDs @@ -395,22 +415,27 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources ) ); - private class ShardAllocationCounts { - private int unassigned = 0; - private int unassigned_new = 0; - private int unassigned_restarting = 0; - private int initializing = 0; - private int started = 0; - private int relocating = 0; - private final Set indicesWithUnavailableShards = new HashSet<>(); + class ShardAllocationCounts { + int unassigned = 0; + int unassigned_new = 0; + int unassigned_restarting = 0; + int initializing = 0; + 
int started = 0; + int relocating = 0; + final Set indicesWithUnavailableShards = new HashSet<>(); + final Set indicesWithAllShardsUnavailable = new HashSet<>(); // We keep the searchable snapshots separately as long as the original index is still available // This is checked during the post-processing - private final SearchableSnapshotsState searchableSnapshotsState = new SearchableSnapshotsState(); - private final Map> diagnosisDefinitions = new HashMap<>(); + SearchableSnapshotsState searchableSnapshotsState = new SearchableSnapshotsState(); + final Map> diagnosisDefinitions = new HashMap<>(); public void increment(ShardRouting routing, ClusterState state, NodesShutdownMetadata shutdowns, boolean verbose) { boolean isNew = isUnassignedDueToNewInitialization(routing, state); boolean isRestarting = isUnassignedDueToTimelyRestart(routing, shutdowns); + boolean allUnavailable = areAllShardsOfThisTypeUnavailable(routing, state); + if (allUnavailable) { + indicesWithAllShardsUnavailable.add(routing.getIndexName()); + } if ((routing.active() || isRestarting || isNew) == false) { String indexName = routing.getIndexName(); Settings indexSettings = state.getMetadata().index(indexName).getSettings(); @@ -451,11 +476,31 @@ public boolean areAllAvailable() { return indicesWithUnavailableShards.isEmpty(); } + public boolean doAnyIndicesHaveAllUnavailable() { + return indicesWithAllShardsUnavailable.isEmpty() == false; + } + private void addDefinition(Diagnosis.Definition diagnosisDefinition, String indexName) { diagnosisDefinitions.computeIfAbsent(diagnosisDefinition, (k) -> new HashSet<>()).add(indexName); } } + /** + * Returns true if all the shards of the same type (primary or replica) are unassigned. For + * example: if a replica is passed then this will return true if ALL replicas are unassigned, + * but if at least one is assigned, it will return false. 
+ */ + private boolean areAllShardsOfThisTypeUnavailable(ShardRouting routing, ClusterState state) { + return StreamSupport.stream( + state.routingTable().allActiveShardsGrouped(new String[] { routing.getIndexName() }, true).spliterator(), + false + ) + .flatMap(shardIter -> shardIter.getShardRoutings().stream()) + .filter(sr -> sr.shardId().equals(routing.shardId())) + .filter(sr -> sr.primary() == routing.primary()) + .allMatch(ShardRouting::unassigned); + } + private static boolean isUnassignedDueToTimelyRestart(ShardRouting routing, NodesShutdownMetadata shutdowns) { var info = routing.unassignedInfo(); if (info == null || info.getReason() != UnassignedInfo.Reason.NODE_RESTARTING) { @@ -805,9 +850,9 @@ private static Optional checkNotEnoughNodesInDataTier( } class ShardAllocationStatus { - private final ShardAllocationCounts primaries = new ShardAllocationCounts(); - private final ShardAllocationCounts replicas = new ShardAllocationCounts(); - private final Metadata clusterMetadata; + final ShardAllocationCounts primaries = new ShardAllocationCounts(); + final ShardAllocationCounts replicas = new ShardAllocationCounts(); + final Metadata clusterMetadata; ShardAllocationStatus(Metadata clusterMetadata) { this.clusterMetadata = clusterMetadata; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java index 853a26263fe9f..5df5de43cffdd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java @@ -31,15 +31,18 @@ import java.util.Map; import java.util.function.ToDoubleFunction; -public record ClusterBalanceStats(Map tiers, Map nodes) - implements - Writeable, - ToXContentObject { +public record ClusterBalanceStats( + int shards, + int 
undesiredShardAllocations, + Map tiers, + Map nodes +) implements Writeable, ToXContentObject { - public static ClusterBalanceStats EMPTY = new ClusterBalanceStats(Map.of(), Map.of()); + public static ClusterBalanceStats EMPTY = new ClusterBalanceStats(0, 0, Map.of(), Map.of()); public static ClusterBalanceStats createFrom( ClusterState clusterState, + DesiredBalance desiredBalance, ClusterInfo clusterInfo, WriteLoadForecaster writeLoadForecaster ) { @@ -50,32 +53,60 @@ public static ClusterBalanceStats createFrom( if (dataRoles.isEmpty()) { continue; } - var nodeStats = NodeBalanceStats.createFrom(routingNode, clusterState.metadata(), clusterInfo, writeLoadForecaster); + var nodeStats = NodeBalanceStats.createFrom( + routingNode, + clusterState.metadata(), + desiredBalance, + clusterInfo, + writeLoadForecaster + ); nodes.put(routingNode.node().getName(), nodeStats); for (DiscoveryNodeRole role : dataRoles) { tierToNodeStats.computeIfAbsent(role.roleName(), ignored -> new ArrayList<>()).add(nodeStats); } } - return new ClusterBalanceStats(Maps.transformValues(tierToNodeStats, TierBalanceStats::createFrom), nodes); + return new ClusterBalanceStats( + nodes.values().stream().mapToInt(NodeBalanceStats::shards).sum(), + nodes.values().stream().mapToInt(NodeBalanceStats::undesiredShardAllocations).sum(), + Maps.transformValues(tierToNodeStats, TierBalanceStats::createFrom), + nodes + ); } public static ClusterBalanceStats readFrom(StreamInput in) throws IOException { - return new ClusterBalanceStats(in.readImmutableMap(TierBalanceStats::readFrom), in.readImmutableMap(NodeBalanceStats::readFrom)); + return new ClusterBalanceStats( + in.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED) ? in.readVInt() : -1, + in.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED) ? 
in.readVInt() : -1, + in.readImmutableMap(TierBalanceStats::readFrom), + in.readImmutableMap(NodeBalanceStats::readFrom) + ); } @Override public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED)) { + out.writeVInt(shards); + } + if (out.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED)) { + out.writeVInt(undesiredShardAllocations); + } out.writeMap(tiers, StreamOutput::writeWriteable); out.writeMap(nodes, StreamOutput::writeWriteable); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject().field("tiers").map(tiers).field("nodes").map(nodes).endObject(); + return builder.startObject() + .field("shard_count", shards) + .field("undesired_shard_allocation_count", undesiredShardAllocations) + .field("tiers", tiers) + .field("nodes", nodes) + .endObject(); } public record TierBalanceStats( MetricStats shardCount, + MetricStats undesiredShardAllocations, MetricStats forecastWriteLoad, MetricStats forecastShardSize, MetricStats actualShardSize @@ -84,6 +115,7 @@ public record TierBalanceStats( private static TierBalanceStats createFrom(List nodes) { return new TierBalanceStats( MetricStats.createFrom(nodes, it -> it.shards), + MetricStats.createFrom(nodes, it -> it.undesiredShardAllocations), MetricStats.createFrom(nodes, it -> it.forecastWriteLoad), MetricStats.createFrom(nodes, it -> it.forecastShardSize), MetricStats.createFrom(nodes, it -> it.actualShardSize) @@ -93,6 +125,9 @@ private static TierBalanceStats createFrom(List nodes) { public static TierBalanceStats readFrom(StreamInput in) throws IOException { return new TierBalanceStats( MetricStats.readFrom(in), + in.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED) + ? 
MetricStats.readFrom(in) + : new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), MetricStats.readFrom(in), MetricStats.readFrom(in), MetricStats.readFrom(in) @@ -102,6 +137,9 @@ public static TierBalanceStats readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { shardCount.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED)) { + undesiredShardAllocations.writeTo(out); + } forecastWriteLoad.writeTo(out); forecastShardSize.writeTo(out); actualShardSize.writeTo(out); @@ -111,6 +149,7 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.startObject() .field("shard_count", shardCount) + .field("undesired_shard_allocation_count", undesiredShardAllocations) .field("forecast_write_load", forecastWriteLoad) .field("forecast_disk_usage", forecastShardSize) .field("actual_disk_usage", actualShardSize) @@ -172,6 +211,7 @@ public record NodeBalanceStats( String nodeId, List roles, int shards, + int undesiredShardAllocations, double forecastWriteLoad, long forecastShardSize, long actualShardSize @@ -182,9 +222,11 @@ public record NodeBalanceStats( private static NodeBalanceStats createFrom( RoutingNode routingNode, Metadata metadata, + DesiredBalance desiredBalance, ClusterInfo clusterInfo, WriteLoadForecaster writeLoadForecaster ) { + int undesired = 0; double forecastWriteLoad = 0.0; long forecastShardSize = 0L; long actualShardSize = 0L; @@ -196,23 +238,37 @@ private static NodeBalanceStats createFrom( forecastWriteLoad += writeLoadForecaster.getForecastedWriteLoad(indexMetadata).orElse(0.0); forecastShardSize += indexMetadata.getForecastedShardSizeInBytes().orElse(shardSize); actualShardSize += shardSize; + if (isDesiredShardAllocation(shardRouting, desiredBalance) == false) { + undesired++; + } } return new NodeBalanceStats( 
routingNode.nodeId(), routingNode.node().getRoles().stream().map(DiscoveryNodeRole::roleName).toList(), routingNode.size(), + undesired, forecastWriteLoad, forecastShardSize, actualShardSize ); } + private static boolean isDesiredShardAllocation(ShardRouting shardRouting, DesiredBalance desiredBalance) { + if (shardRouting.relocating()) { + // relocating out shards are temporarily accepted + return true; + } + var assignment = desiredBalance.getAssignment(shardRouting.shardId()); + return assignment != null && assignment.nodeIds().contains(shardRouting.currentNodeId()); + } + public static NodeBalanceStats readFrom(StreamInput in) throws IOException { return new NodeBalanceStats( in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) ? in.readString() : UNKNOWN_NODE_ID, in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) ? in.readStringCollectionAsList() : List.of(), in.readInt(), + in.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED) ? 
in.readVInt() : -1, in.readDouble(), in.readLong(), in.readLong() @@ -228,6 +284,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(roles); } out.writeInt(shards); + if (out.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED)) { + out.writeVInt(undesiredShardAllocations); + } out.writeDouble(forecastWriteLoad); out.writeLong(forecastShardSize); out.writeLong(actualShardSize); @@ -241,6 +300,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } return builder.field("roles", roles) .field("shard_count", shards) + .field("undesired_shard_allocation_count", undesiredShardAllocations) .field("forecast_write_load", forecastWriteLoad) .humanReadableField("forecast_disk_usage_bytes", "forecast_disk_usage", ByteSizeValue.ofBytes(forecastShardSize)) .humanReadableField("actual_disk_usage_bytes", "actual_disk_usage", ByteSizeValue.ofBytes(actualShardSize)) diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java index e6ed24dc7220a..74de0b2e03e60 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java @@ -25,10 +25,10 @@ import java.io.IOException; import java.util.Locale; -public class GeoPoint implements SpatialPoint, ToXContentFragment { +public final class GeoPoint implements SpatialPoint, ToXContentFragment { - protected double lat; - protected double lon; + private double lat; + private double lon; public GeoPoint() {} @@ -38,7 +38,6 @@ public GeoPoint() {} * * @param value String to create the point from */ - @SuppressWarnings("this-escape") public GeoPoint(String value) { this.resetFromString(value); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/CreationException.java b/server/src/main/java/org/elasticsearch/common/inject/CreationException.java index 
f09248de947e9..78f89e95e5ff7 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/CreationException.java +++ b/server/src/main/java/org/elasticsearch/common/inject/CreationException.java @@ -27,13 +27,12 @@ * * @author crazybob@google.com (Bob Lee) */ -public class CreationException extends RuntimeException { +public final class CreationException extends RuntimeException { private final Collection messages; /** * Creates a CreationException containing {@code messages}. */ - @SuppressWarnings("this-escape") public CreationException(Collection messages) { this.messages = messages; if (this.messages.isEmpty()) { diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java index c7e9a4abf2c57..478ae231e16ff 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java @@ -17,18 +17,16 @@ * Resettable {@link StreamInput} that wraps a byte array. It is heavily inspired in Lucene's * {@link org.apache.lucene.store.ByteArrayDataInput}. 
*/ -public class ByteArrayStreamInput extends StreamInput { +public final class ByteArrayStreamInput extends StreamInput { private byte[] bytes; private int pos; private int limit; - @SuppressWarnings("this-escape") public ByteArrayStreamInput() { reset(BytesRef.EMPTY_BYTES); } - @SuppressWarnings("this-escape") public ByteArrayStreamInput(byte[] bytes) { reset(bytes); } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutput.java index 6a02bedcdf086..42fb7f4a6afe4 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutput.java @@ -17,9 +17,8 @@ * This {@link StreamOutput} writes nowhere. It can be used to check if serialization would * be successful writing to a specific version. */ -public class VersionCheckingStreamOutput extends StreamOutput { +public final class VersionCheckingStreamOutput extends StreamOutput { - @SuppressWarnings("this-escape") public VersionCheckingStreamOutput(TransportVersion version) { setTransportVersion(version); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/ECSJsonLayout.java b/server/src/main/java/org/elasticsearch/common/logging/ECSJsonLayout.java index 54b5749b797f7..93ca7a9615be6 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ECSJsonLayout.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ECSJsonLayout.java @@ -32,14 +32,13 @@ public static ECSJsonLayout.Builder newBuilder() { return new ECSJsonLayout.Builder().asBuilder(); } - public static class Builder extends AbstractStringLayout.Builder + public static final class Builder extends AbstractStringLayout.Builder implements org.apache.logging.log4j.core.util.Builder { @PluginAttribute("dataset") String dataset; - @SuppressWarnings("this-escape") public Builder() { 
setCharset(StandardCharsets.UTF_8); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESJsonLayout.java b/server/src/main/java/org/elasticsearch/common/logging/ESJsonLayout.java index fb7475e3cba53..a5272b8074d79 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ESJsonLayout.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ESJsonLayout.java @@ -147,7 +147,7 @@ PatternLayout getPatternLayout() { return patternLayout; } - public static class Builder> extends AbstractStringLayout.Builder + public static final class Builder> extends AbstractStringLayout.Builder implements org.apache.logging.log4j.core.util.Builder { @@ -163,7 +163,6 @@ public static class Builder> extends AbstractS @PluginConfiguration private Configuration config; - @SuppressWarnings("this-escape") public Builder() { setCharset(StandardCharsets.UTF_8); } diff --git a/server/src/main/java/org/elasticsearch/common/metrics/Counters.java b/server/src/main/java/org/elasticsearch/common/metrics/Counters.java index 665ed371955c6..9606fc7687595 100644 --- a/server/src/main/java/org/elasticsearch/common/metrics/Counters.java +++ b/server/src/main/java/org/elasticsearch/common/metrics/Counters.java @@ -28,11 +28,10 @@ * that will not have conflicts, which means that there no counter will have a label which is a substring of the label of another counter. * For example, the counters `foo: 1` and `foo.bar: 3` cannot co-exist in a nested map. 
*/ -public class Counters implements Writeable { +public final class Counters implements Writeable { private final ConcurrentMap counters = new ConcurrentHashMap<>(); - @SuppressWarnings("this-escape") public Counters(StreamInput in) throws IOException { int numCounters = in.readVInt(); for (int i = 0; i < numCounters; i++) { diff --git a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java index 1ac3db3827eb4..b3639079cc920 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java +++ b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java @@ -65,7 +65,7 @@ * } * } */ -public class LocallyMountedSecrets implements SecureSettings { +public final class LocallyMountedSecrets implements SecureSettings { public static final String SECRETS_FILE_NAME = "secrets.json"; public static final String SECRETS_DIRECTORY = "secrets"; @@ -116,7 +116,6 @@ public class LocallyMountedSecrets implements SecureSettings { /** * Direct constructor to be used by the CLI */ - @SuppressWarnings("this-escape") public LocallyMountedSecrets(Environment environment) { var secretsDirPath = resolveSecretsDir(environment); var secretsFilePath = resolveSecretsFile(environment); diff --git a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java index 91dbfc30123fe..c78db448380b3 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java @@ -22,7 +22,7 @@ /** * Compact serializable container for ByteRefs */ -public class BytesRefArray implements Accountable, Releasable, Writeable { +public final class BytesRefArray implements Accountable, Releasable, Writeable { // base size of the bytes ref array private static final long 
BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefArray.class); @@ -32,7 +32,6 @@ public class BytesRefArray implements Accountable, Releasable, Writeable { private ByteArray bytes; private long size; - @SuppressWarnings("this-escape") public BytesRefArray(long capacity, BigArrays bigArrays) { this.bigArrays = bigArrays; boolean success = false; @@ -49,7 +48,6 @@ public BytesRefArray(long capacity, BigArrays bigArrays) { size = 0; } - @SuppressWarnings("this-escape") public BytesRefArray(StreamInput in, BigArrays bigArrays) throws IOException { this.bigArrays = bigArrays; // we allocate big arrays so we have to `close` if we fail here or we'll leak them. diff --git a/server/src/main/java/org/elasticsearch/common/util/FeatureFlag.java b/server/src/main/java/org/elasticsearch/common/util/FeatureFlag.java index 5bbf7aff906c0..1be14026c20c8 100644 --- a/server/src/main/java/org/elasticsearch/common/util/FeatureFlag.java +++ b/server/src/main/java/org/elasticsearch/common/util/FeatureFlag.java @@ -36,7 +36,7 @@ */ public class FeatureFlag { - private final Logger logger = LogManager.getLogger(FeatureFlag.class); + private static final Logger logger = LogManager.getLogger(FeatureFlag.class); private final String name; private final boolean enabled; diff --git a/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java b/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java index f54500a806cca..860060ca5a346 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java @@ -17,7 +17,7 @@ * A hash table from native longs to objects. This implementation resolves collisions * using open-addressing and does not support null values. This class is not thread-safe. 
*/ -public class LongObjectPagedHashMap extends AbstractPagedHashMap implements Iterable> { +public final class LongObjectPagedHashMap extends AbstractPagedHashMap implements Iterable> { private LongArray keys; private ObjectArray values; @@ -26,7 +26,6 @@ public LongObjectPagedHashMap(long capacity, BigArrays bigArrays) { this(capacity, DEFAULT_MAX_LOAD_FACTOR, bigArrays); } - @SuppressWarnings("this-escape") public LongObjectPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) { super(capacity, maxLoadFactor, bigArrays); boolean success = false; diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index cc685b26ce239..0380bb80e0013 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -199,7 +199,7 @@ public String toString() { */ static final String SEARCHABLE_SHARED_CACHE_FILE = "shared_snapshot_cache"; - public static class NodeLock implements Releasable { + public static final class NodeLock implements Releasable { private final Lock[] locks; private final DataPath[] dataPaths; @@ -213,7 +213,6 @@ public NodeLock(final Logger logger, final Environment environment, final Checke * Tries to acquire a node lock for a node id, throws {@code IOException} if it is unable to acquire it * @param pathFunction function to check node path before attempt of acquiring a node lock */ - @SuppressWarnings("this-escape") public NodeLock( final Logger logger, final Environment environment, @@ -990,7 +989,7 @@ private final class InternalShardLock { lockDetails = Tuple.tuple(System.nanoTime(), details); } - protected void release() { + private void release() { mutex.release(); decWaitCount(); } diff --git a/server/src/main/java/org/elasticsearch/env/ShardLockObtainFailedException.java b/server/src/main/java/org/elasticsearch/env/ShardLockObtainFailedException.java index 
001b35ab11cbd..4aab8b91a0733 100644 --- a/server/src/main/java/org/elasticsearch/env/ShardLockObtainFailedException.java +++ b/server/src/main/java/org/elasticsearch/env/ShardLockObtainFailedException.java @@ -17,15 +17,13 @@ /** * Exception used when the in-memory lock for a shard cannot be obtained */ -public class ShardLockObtainFailedException extends ElasticsearchException { +public final class ShardLockObtainFailedException extends ElasticsearchException { - @SuppressWarnings("this-escape") public ShardLockObtainFailedException(ShardId shardId, String message) { super(buildMessage(shardId, message)); this.setShard(shardId); } - @SuppressWarnings("this-escape") public ShardLockObtainFailedException(ShardId shardId, String message, Throwable cause) { super(buildMessage(shardId, message), cause); this.setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/health/RestGetHealthAction.java b/server/src/main/java/org/elasticsearch/health/RestGetHealthAction.java index dd3176cf912a6..8dcea1bb0e7e2 100644 --- a/server/src/main/java/org/elasticsearch/health/RestGetHealthAction.java +++ b/server/src/main/java/org/elasticsearch/health/RestGetHealthAction.java @@ -51,4 +51,9 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli new RestChunkedToXContentListener<>(channel) ); } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } } diff --git a/server/src/main/java/org/elasticsearch/health/node/action/TransportHealthNodeAction.java b/server/src/main/java/org/elasticsearch/health/node/action/TransportHealthNodeAction.java index 18c009deb8840..f25168bd4e98c 100644 --- a/server/src/main/java/org/elasticsearch/health/node/action/TransportHealthNodeAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/action/TransportHealthNodeAction.java @@ -74,7 +74,7 @@ protected TransportHealthNodeAction( Writeable.Reader response, Executor executor ) { - super(actionName, true, transportService, 
actionFilters, request, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(actionName, false, transportService, actionFilters, request, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.transportService = transportService; this.clusterService = clusterService; this.threadPool = threadPool; diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index cf5a53111a9cb..2f3d376e39086 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -30,7 +30,7 @@ */ class HttpTracer { - private final Logger logger = LogManager.getLogger(HttpTracer.class); + private static final Logger logger = LogManager.getLogger(HttpTracer.class); private volatile String[] tracerLogInclude; private volatile String[] tracerLogExclude; diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index df1aca3dc7b53..ee2cb06cb9559 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -37,7 +37,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. 
*/ -public class PerFieldMapperCodec extends Lucene95Codec { +public final class PerFieldMapperCodec extends Lucene95Codec { private final MapperService mapperService; private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); @@ -49,7 +49,6 @@ public class PerFieldMapperCodec extends Lucene95Codec { : "PerFieldMapperCodec must subclass the latest lucene codec: " + Lucene.LATEST_CODEC; } - @SuppressWarnings("this-escape") public PerFieldMapperCodec(Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.mapperService = mapperService; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index 0457a23d85105..403156c95540e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -191,11 +191,10 @@ protected String contentType() { return CONTENT_TYPE; } - public static class CustomBinaryDocValuesField extends CustomDocValuesField { + public static final class CustomBinaryDocValuesField extends CustomDocValuesField { private final List bytesList; - @SuppressWarnings("this-escape") public CustomBinaryDocValuesField(String name, byte[] bytes) { super(name); bytesList = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 1fb3f706c56a2..a5793df3b82e0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -68,7 +68,7 @@ private static BooleanFieldMapper toType(FieldMapper in) { return (BooleanFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder 
{ private final Parameter docValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); @@ -93,7 +93,6 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; - @SuppressWarnings("this-escape") public Builder(String name, ScriptCompiler scriptCompiler, boolean ignoreMalformedByDefault, IndexVersion indexCreatedVersion) { super(name); this.scriptCompiler = Objects.requireNonNull(scriptCompiler); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 21b9ec04c56c0..9d12fc6910d66 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -223,7 +223,7 @@ private static DateFieldMapper toType(FieldMapper in) { return (DateFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter index = Parameter.indexParam(m -> toType(m).indexed, true); private final Parameter docValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -253,7 +253,6 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; private final ScriptCompiler scriptCompiler; - @SuppressWarnings("this-escape") public Builder( String name, Resolution resolution, @@ -396,11 +395,11 @@ public DateFieldMapper build(MapperBuilderContext context) { }, MINIMUM_COMPATIBILITY_VERSION); public static final class DateFieldType extends MappedFieldType { - protected final DateFormatter dateTimeFormatter; - protected final DateMathParser dateMathParser; - protected final Resolution resolution; - protected final String nullValue; - protected final FieldValues scriptValues; + final 
DateFormatter dateTimeFormatter; + final DateMathParser dateMathParser; + private final Resolution resolution; + private final String nullValue; + private final FieldValues scriptValues; private final boolean pointsMetadataAvailable; public DateFieldType( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index f97817570838c..10e24fbeebb87 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -76,7 +76,7 @@ private static GeoPointFieldMapper toType(FieldMapper in) { return (GeoPointFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { final Parameter> ignoreMalformed; final Parameter> ignoreZValue = ignoreZValueParam(m -> builder(m).ignoreZValue.get()); @@ -94,7 +94,6 @@ public static class Builder extends FieldMapper.Builder { private final Parameter dimension; // can only support time_series_dimension: false private final IndexMode indexMode; // either STANDARD or TIME_SERIES - @SuppressWarnings("this-escape") public Builder( String name, ScriptCompiler scriptCompiler, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 7d6b7711360fe..80fd384f15fb7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -69,7 +69,7 @@ private static IpFieldMapper toType(FieldMapper in) { return (IpFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); private final 
Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -89,7 +89,6 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; private final ScriptCompiler scriptCompiler; - @SuppressWarnings("this-escape") public Builder(String name, ScriptCompiler scriptCompiler, boolean ignoreMalformedByDefault, IndexVersion indexCreatedVersion) { super(name); this.scriptCompiler = Objects.requireNonNull(scriptCompiler); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 9bc3db22365de..f15bb0069570f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -137,7 +137,7 @@ private static KeywordFieldMapper toType(FieldMapper in) { return (KeywordFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -184,7 +184,6 @@ public static class Builder extends FieldMapper.Builder { private final ScriptCompiler scriptCompiler; private final IndexVersion indexCreatedVersion; - @SuppressWarnings("this-escape") public Builder(String name, IndexAnalyzers indexAnalyzers, ScriptCompiler scriptCompiler, IndexVersion indexCreatedVersion) { super(name); this.indexAnalyzers = indexAnalyzers; @@ -1008,7 +1007,7 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { return syntheticFieldLoader(simpleName()); } - protected SourceLoader.SyntheticFieldLoader syntheticFieldLoader(String simpleName) { + SourceLoader.SyntheticFieldLoader syntheticFieldLoader(String simpleName) { if (hasScript()) { return 
SourceLoader.SyntheticFieldLoader.NOTHING; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 7dfc5a98037d0..84e9e84fb8ceb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -89,7 +89,7 @@ private static NumberFieldMapper toType(FieldMapper in) { private static final IndexVersion MINIMUM_COMPATIBILITY_VERSION = IndexVersion.fromId(5000099); - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter indexed; private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -143,7 +143,6 @@ public static Builder docValuesOnly(String name, NumberType type, IndexVersion i return builder; } - @SuppressWarnings("this-escape") public Builder( String name, NumberType type, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 91616041f65f6..1949249b9be2d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -87,7 +87,7 @@ import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; /** A {@link FieldMapper} for full-text fields. 
*/ -public class TextFieldMapper extends FieldMapper { +public final class TextFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "text"; private static final String FAST_PHRASE_SUFFIX = "._index_phrase"; @@ -1155,8 +1155,7 @@ public Query existsQuery(SearchExecutionContext context) { private final SubFieldInfo prefixFieldInfo; private final SubFieldInfo phraseFieldInfo; - @SuppressWarnings("this-escape") - protected TextFieldMapper( + private TextFieldMapper( String simpleName, FieldType fieldType, TextFieldType mappedFieldType, diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index 2533b5b611068..b869096c12fb2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -50,7 +50,7 @@ * A query that matches on multiple text fields, as if the field contents had been indexed * into a single combined field. */ -public class CombinedFieldsQueryBuilder extends AbstractQueryBuilder { +public final class CombinedFieldsQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "combined_fields"; private static final ParseField QUERY_FIELD = new ParseField("query"); @@ -109,7 +109,6 @@ public class CombinedFieldsQueryBuilder extends AbstractQueryBuilder { +public final class MultiMatchQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "multi_match"; private static final String CUTOFF_FREQUENCY_DEPRECATION_MSG = "cutoff_freqency is not supported." @@ -185,7 +185,6 @@ public MultiMatchQueryBuilder.Type getType() { /** * Constructs a new text query. */ - @SuppressWarnings("this-escape") public MultiMatchQueryBuilder(Object value, String... 
fields) { if (value == null) { throw new IllegalArgumentException("[" + NAME + "] requires query value"); @@ -203,7 +202,6 @@ public MultiMatchQueryBuilder(Object value, String... fields) { /** * Read from a stream. */ - @SuppressWarnings("this-escape") public MultiMatchQueryBuilder(StreamInput in) throws IOException { super(in); value = in.readGenericValue(); diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java index 0b0b35b61953d..fd290e56f7e47 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java @@ -18,7 +18,7 @@ /** * Exception that is thrown when creating lucene queries on the shard */ -public class QueryShardException extends ElasticsearchException { +public final class QueryShardException extends ElasticsearchException { public QueryShardException(QueryRewriteContext context, String msg, Object... args) { this(context, msg, null, args); @@ -32,7 +32,6 @@ public QueryShardException(QueryRewriteContext context, String msg, Throwable ca * This constructor is provided for use in unit tests where a * {@link SearchExecutionContext} may not be available */ - @SuppressWarnings("this-escape") public QueryShardException(Index index, String msg, Throwable cause, Object... args) { super(msg, cause, args); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 195e1d51c806c..38ca88e8a9379 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -45,7 +45,7 @@ * (using {@link #field(String)}), will run the parsed query against the provided fields, and combine * them using Dismax. 
*/ -public class QueryStringQueryBuilder extends AbstractQueryBuilder { +public final class QueryStringQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "query_string"; @@ -153,7 +153,6 @@ public QueryStringQueryBuilder(String queryString) { /** * Read from a stream. */ - @SuppressWarnings("this-escape") public QueryStringQueryBuilder(StreamInput in) throws IOException { super(in); queryString = in.readString(); diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index 8186c9c2d9a01..c4806dbd3a0a8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -98,6 +98,8 @@ public class SearchExecutionContext extends QueryRewriteContext { private final Map namedQueries = new HashMap<>(); private NestedScope nestedScope; + private QueryBuilder aliasFilter; + /** * Build a {@linkplain SearchExecutionContext}. */ @@ -228,6 +230,15 @@ private void reset() { this.nestedScope = new NestedScope(); } + // Set alias filter, so it can be applied for queries that need it (e.g. knn query) + public void setAliasFilter(QueryBuilder aliasFilter) { + this.aliasFilter = aliasFilter; + } + + public QueryBuilder getAliasFilter() { + return aliasFilter; + } + /** * The similarity to use in searches, which takes into account per-field configuration. 
*/ diff --git a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index 4f6ba803eb7ac..b2067549fab67 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -69,7 +69,7 @@ * "https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html" * > online documentation. */ -public class SimpleQueryStringBuilder extends AbstractQueryBuilder { +public final class SimpleQueryStringBuilder extends AbstractQueryBuilder { /** Default for using lenient query parsing.*/ public static final boolean DEFAULT_LENIENT = false; @@ -142,7 +142,6 @@ public SimpleQueryStringBuilder(String queryText) { /** * Read from a stream. */ - @SuppressWarnings("this-escape") public SimpleQueryStringBuilder(StreamInput in) throws IOException { super(in); queryText = in.readString(); diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShardRecoveryException.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShardRecoveryException.java index d03c1c2db06ee..38ad67da24175 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShardRecoveryException.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShardRecoveryException.java @@ -13,8 +13,7 @@ import java.io.IOException; -public class IndexShardRecoveryException extends ElasticsearchException { - @SuppressWarnings("this-escape") +public final class IndexShardRecoveryException extends ElasticsearchException { public IndexShardRecoveryException(ShardId shardId, String msg, Throwable cause) { super(msg, cause); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java index 
cf9fbf222fe0e..956858f094c95 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java @@ -238,7 +238,6 @@ void sync( } static class SnapshotSender extends AbstractRunnable implements ActionListener { - private final Logger logger; private final SyncAction syncAction; private final ResyncTask task; // to track progress private final String primaryAllocationId; @@ -270,7 +269,6 @@ static class SnapshotSender extends AbstractRunnable implements ActionListener listener ) { - this.logger = PrimaryReplicaSyncer.logger; this.syncAction = syncAction; this.task = task; this.shardId = shardId; @@ -353,7 +351,7 @@ protected void doRun() throws Exception { maxSeenAutoIdTimestamp, operations.toArray(EMPTY_ARRAY) ); - logger.trace( + PrimaryReplicaSyncer.logger.trace( "{} sending batch of [{}][{}] (total sent: [{}], skipped: [{}])", shardId, operations.size(), @@ -364,7 +362,12 @@ protected void doRun() throws Exception { firstMessage.set(false); syncAction.sync(request, task, primaryAllocationId, primaryTerm, this); } else if (closed.compareAndSet(false, true)) { - logger.trace("{} resync completed (total sent: [{}], skipped: [{}])", shardId, totalSentOps.get(), totalSkippedOps.get()); + PrimaryReplicaSyncer.logger.trace( + "{} resync completed (total sent: [{}], skipped: [{}])", + shardId, + totalSentOps.get(), + totalSkippedOps.get() + ); listener.onResponse(null); } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardNotFoundException.java b/server/src/main/java/org/elasticsearch/index/shard/ShardNotFoundException.java index fa66d8fe86ad9..8c9ab0e30b28f 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardNotFoundException.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardNotFoundException.java @@ -13,7 +13,7 @@ import java.io.IOException; -public class ShardNotFoundException extends ResourceNotFoundException 
{ +public final class ShardNotFoundException extends ResourceNotFoundException { public ShardNotFoundException(ShardId shardId) { this(shardId, null); } @@ -26,7 +26,6 @@ public ShardNotFoundException(ShardId shardId, String msg, Object... args) { this(shardId, msg, null, args); } - @SuppressWarnings("this-escape") public ShardNotFoundException(ShardId shardId, String msg, Throwable ex, Object... args) { super(msg, ex, args); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java index 843f049d30e41..140c4684d1a70 100644 --- a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java @@ -119,9 +119,8 @@ public synchronized Copy moveToStarted( this.totalFileCount = totalFileCount; this.incrementalSize = incrementalSize; this.totalSize = totalSize; - } else if (isAborted()) { - throw new AbortedSnapshotException(); } else { + ensureNotAborted(); assert false : "Should not try to move stage [" + stage.get() + "] to [STARTED]"; throw new IllegalStateException( "Unable to move the shard snapshot status to [STARTED]: " + "expecting [INIT] but got [" + stage.get() + "]" @@ -195,12 +194,8 @@ public ShardSnapshotResult getShardSnapshotResult() { return shardSnapshotResult.get(); } - public boolean isAborted() { - return stage.get() == Stage.ABORTED; - } - public void ensureNotAborted() { - if (isAborted()) { + if (stage.get() == Stage.ABORTED) { throw new AbortedSnapshotException(); } } @@ -243,15 +238,15 @@ public static IndexShardSnapshotStatus newInitializing(ShardGeneration generatio return new IndexShardSnapshotStatus(Stage.INIT, 0L, 0L, 0, 0, 0, 0, 0, 0, null, generation); } - public static IndexShardSnapshotStatus newFailed(final String failure) { + public static IndexShardSnapshotStatus.Copy 
newFailed(final String failure) { assert failure != null : "expecting non null failure for a failed IndexShardSnapshotStatus"; if (failure == null) { throw new IllegalArgumentException("A failure description is required for a failed IndexShardSnapshotStatus"); } - return new IndexShardSnapshotStatus(Stage.FAILURE, 0L, 0L, 0, 0, 0, 0, 0, 0, failure, null); + return new IndexShardSnapshotStatus(Stage.FAILURE, 0L, 0L, 0, 0, 0, 0, 0, 0, failure, null).asCopy(); } - public static IndexShardSnapshotStatus newDone( + public static IndexShardSnapshotStatus.Copy newDone( final long startTime, final long totalTime, final int incrementalFileCount, @@ -273,7 +268,7 @@ public static IndexShardSnapshotStatus newDone( incrementalSize, null, generation - ); + ).asCopy(); } /** diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java index 5b67f58f06a35..2a8fe96151c11 100644 --- a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java @@ -38,7 +38,7 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment { /** * Information about snapshotted file */ - public static class FileInfo implements Writeable { + public static final class FileInfo implements Writeable { public static final String SERIALIZE_WRITER_UUID = "serialize_writer_uuid"; private final String name; @@ -55,7 +55,6 @@ public static class FileInfo implements Writeable { * @param metadata the files meta data * @param partSize size of the single chunk */ - @SuppressWarnings("this-escape") public FileInfo(String name, StoreFileMetadata metadata, @Nullable ByteSizeValue partSize) { this.name = Objects.requireNonNull(name); this.metadata = metadata; diff --git 
a/server/src/main/java/org/elasticsearch/index/translog/TranslogException.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogException.java index 000b36b299fd0..78b9b6424ece2 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogException.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogException.java @@ -14,13 +14,12 @@ import java.io.IOException; -public class TranslogException extends ElasticsearchException { +public final class TranslogException extends ElasticsearchException { public TranslogException(ShardId shardId, String msg) { this(shardId, msg, null); } - @SuppressWarnings("this-escape") public TranslogException(ShardId shardId, String msg, Throwable cause) { super(msg, cause); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/indices/AliasFilterParsingException.java b/server/src/main/java/org/elasticsearch/indices/AliasFilterParsingException.java index 371444b6a72da..744f8c2ed3033 100644 --- a/server/src/main/java/org/elasticsearch/indices/AliasFilterParsingException.java +++ b/server/src/main/java/org/elasticsearch/indices/AliasFilterParsingException.java @@ -14,9 +14,8 @@ import java.io.IOException; -public class AliasFilterParsingException extends ElasticsearchException { +public final class AliasFilterParsingException extends ElasticsearchException { - @SuppressWarnings("this-escape") public AliasFilterParsingException(Index index, String name, String desc, Throwable ex) { super("[" + name + "], " + desc, ex); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/IndexClosedException.java b/server/src/main/java/org/elasticsearch/indices/IndexClosedException.java index 4c4035dbc6357..f64e6758130be 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndexClosedException.java +++ b/server/src/main/java/org/elasticsearch/indices/IndexClosedException.java @@ -18,9 +18,8 @@ /** * Exception indicating that one or more requested indices are 
closed. */ -public class IndexClosedException extends ElasticsearchException { +public final class IndexClosedException extends ElasticsearchException { - @SuppressWarnings("this-escape") public IndexClosedException(Index index) { super("closed"); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/IndexCreationException.java b/server/src/main/java/org/elasticsearch/indices/IndexCreationException.java index a38bcc8ae02de..4558c5a7bc2bc 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndexCreationException.java +++ b/server/src/main/java/org/elasticsearch/indices/IndexCreationException.java @@ -14,9 +14,8 @@ import java.io.IOException; -public class IndexCreationException extends ElasticsearchException implements ElasticsearchWrapperException { +public final class IndexCreationException extends ElasticsearchException implements ElasticsearchWrapperException { - @SuppressWarnings("this-escape") public IndexCreationException(String index, Throwable cause) { super("failed to create index [{}]", cause, index); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/IndexPrimaryShardNotAllocatedException.java b/server/src/main/java/org/elasticsearch/indices/IndexPrimaryShardNotAllocatedException.java index 819378bae0fed..1591ae4428fec 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndexPrimaryShardNotAllocatedException.java +++ b/server/src/main/java/org/elasticsearch/indices/IndexPrimaryShardNotAllocatedException.java @@ -19,12 +19,11 @@ * Thrown when some action cannot be performed because the primary shard of * some shard group in an index has not been allocated post api action. 
*/ -public class IndexPrimaryShardNotAllocatedException extends ElasticsearchException { +public final class IndexPrimaryShardNotAllocatedException extends ElasticsearchException { public IndexPrimaryShardNotAllocatedException(StreamInput in) throws IOException { super(in); } - @SuppressWarnings("this-escape") public IndexPrimaryShardNotAllocatedException(Index index) { super("primary not allocated post api"); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 61430b08143aa..bcd5b6015df51 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -335,7 +335,7 @@ public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, lon } }); this.cleanInterval = INDICES_CACHE_CLEAN_INTERVAL_SETTING.get(settings); - this.cacheCleaner = new CacheCleaner(indicesFieldDataCache, indicesRequestCache, logger, threadPool, this.cleanInterval); + this.cacheCleaner = new CacheCleaner(indicesFieldDataCache, indicesRequestCache, threadPool, this.cleanInterval); this.metaStateService = metaStateService; this.engineFactoryProviders = engineFactoryProviders; @@ -1440,22 +1440,14 @@ public AnalysisRegistry getAnalysis() { private static final class CacheCleaner implements Runnable, Releasable { private final IndicesFieldDataCache cache; - private final Logger logger; private final ThreadPool threadPool; private final TimeValue interval; private final AtomicBoolean closed = new AtomicBoolean(false); private final IndicesRequestCache requestCache; - CacheCleaner( - IndicesFieldDataCache cache, - IndicesRequestCache requestCache, - Logger logger, - ThreadPool threadPool, - TimeValue interval - ) { + CacheCleaner(IndicesFieldDataCache cache, IndicesRequestCache requestCache, ThreadPool threadPool, TimeValue interval) { this.cache = cache; this.requestCache = 
requestCache; - this.logger = logger; this.threadPool = threadPool; this.interval = interval; } diff --git a/server/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java b/server/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java index 4b06757652f75..b9d25b83d24ec 100644 --- a/server/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java +++ b/server/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java @@ -15,9 +15,8 @@ import java.io.IOException; -public class InvalidAliasNameException extends ElasticsearchException { +public final class InvalidAliasNameException extends ElasticsearchException { - @SuppressWarnings("this-escape") public InvalidAliasNameException(Index index, String name, String desc) { super("Invalid alias name [{}], {}", name, desc); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java b/server/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java index fec791364cf94..7a1e1c8cede43 100644 --- a/server/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java +++ b/server/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java @@ -15,15 +15,13 @@ import java.io.IOException; -public class InvalidIndexNameException extends ElasticsearchException { +public final class InvalidIndexNameException extends ElasticsearchException { - @SuppressWarnings("this-escape") public InvalidIndexNameException(String name, String desc) { super("Invalid index name [" + name + "], " + desc); setIndex(name); } - @SuppressWarnings("this-escape") public InvalidIndexNameException(Index index, String name, String desc) { super("Invalid index name [" + name + "], " + desc); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/TypeMissingException.java b/server/src/main/java/org/elasticsearch/indices/TypeMissingException.java index c53b72cbe3803..ab4c43397b12a 100644 --- 
a/server/src/main/java/org/elasticsearch/indices/TypeMissingException.java +++ b/server/src/main/java/org/elasticsearch/indices/TypeMissingException.java @@ -16,21 +16,18 @@ import java.io.IOException; import java.util.Arrays; -public class TypeMissingException extends ElasticsearchException { +public final class TypeMissingException extends ElasticsearchException { - @SuppressWarnings("this-escape") public TypeMissingException(Index index, String... types) { super("type" + Arrays.toString(types) + " missing"); setIndex(index); } - @SuppressWarnings("this-escape") public TypeMissingException(Index index, Throwable cause, String... types) { super("type" + Arrays.toString(types) + " missing", cause); setIndex(index); } - @SuppressWarnings("this-escape") public TypeMissingException(String index, String... types) { super("type[" + Arrays.toString(types) + "] missing"); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 36a89f4c0d407..d692c331927d2 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -65,7 +65,7 @@ * * @see org.elasticsearch.index.analysis.HunspellTokenFilterFactory */ -public class HunspellService { +public final class HunspellService { private static final Logger logger = LogManager.getLogger(HunspellService.class); @@ -89,7 +89,6 @@ public class HunspellService { private final Path hunspellDir; private final Function loadingFunction; - @SuppressWarnings("this-escape") public HunspellService(final Settings settings, final Environment env, final Map knownDictionaries) throws IOException { this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries); diff --git a/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java 
b/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index 76de0d43b7f2e..cf9378aabb993 100644 --- a/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -39,7 +39,7 @@ import java.util.List; import java.util.function.ToLongBiFunction; -public class IndicesFieldDataCache implements RemovalListener, Releasable { +public final class IndicesFieldDataCache implements RemovalListener, Releasable { private static final Logger logger = LogManager.getLogger(IndicesFieldDataCache.class); @@ -51,7 +51,6 @@ public class IndicesFieldDataCache implements RemovalListener cache; - @SuppressWarnings("this-escape") public IndicesFieldDataCache(Settings settings, IndexFieldDataCache.Listener indicesFieldDataCacheListener) { this.indicesFieldDataCacheListener = indicesFieldDataCacheListener; final long sizeInBytes = INDICES_FIELDDATA_CACHE_SIZE_KEY.get(settings).getBytes(); @@ -68,7 +67,7 @@ public void close() { } public IndexFieldDataCache buildIndexFieldDataCache(IndexFieldDataCache.Listener listener, Index index, String fieldName) { - return new IndexFieldCache(logger, cache, index, fieldName, indicesFieldDataCacheListener, listener); + return new IndexFieldCache(cache, index, fieldName, indicesFieldDataCacheListener, listener); } public Cache getCache() { @@ -108,14 +107,12 @@ public long applyAsLong(Key key, Accountable ramUsage) { * A specific cache instance for the relevant parameters of it (index, fieldNames, fieldType). */ static class IndexFieldCache implements IndexFieldDataCache, IndexReader.ClosedListener { - private final Logger logger; final Index index; final String fieldName; private final Cache cache; private final Listener[] listeners; - IndexFieldCache(Logger logger, final Cache cache, Index index, String fieldName, Listener... 
listeners) { - this.logger = logger; + IndexFieldCache(final Cache cache, Index index, String fieldName, Listener... listeners) { this.listeners = listeners; this.index = index; this.fieldName = fieldName; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java index 1a40f7526240c..87f491a598d52 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java @@ -18,13 +18,12 @@ import java.io.IOException; import java.util.Objects; -public class RecoverFilesRecoveryException extends ElasticsearchException implements ElasticsearchWrapperException { +public final class RecoverFilesRecoveryException extends ElasticsearchException implements ElasticsearchWrapperException { private final int numberOfFiles; private final ByteSizeValue totalFilesSize; - @SuppressWarnings("this-escape") public RecoverFilesRecoveryException(ShardId shardId, int numberOfFiles, ByteSizeValue totalFilesSize, Throwable cause) { super("Failed to transfer [{}] files with total size of [{}]", cause, numberOfFiles, totalFilesSize); Objects.requireNonNull(totalFilesSize, "totalFilesSize must not be null"); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCommitTooNewException.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCommitTooNewException.java index d89a429dc853f..c42f88c9b843a 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCommitTooNewException.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCommitTooNewException.java @@ -14,8 +14,7 @@ import java.io.IOException; -public class RecoveryCommitTooNewException extends ElasticsearchException { - @SuppressWarnings("this-escape") +public final class 
RecoveryCommitTooNewException extends ElasticsearchException { public RecoveryCommitTooNewException(ShardId shardId, String message) { super(message); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsService.java b/server/src/main/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsService.java index 91592d5f8349b..07d62fb87fe55 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsService.java @@ -54,7 +54,7 @@ import static org.elasticsearch.indices.recovery.RecoverySettings.SNAPSHOT_RECOVERIES_SUPPORTED_VERSION; public class ShardSnapshotsService { - private final Logger logger = LogManager.getLogger(ShardSnapshotsService.class); + private static final Logger logger = LogManager.getLogger(ShardSnapshotsService.class); private final Client client; private final RepositoriesService repositoriesService; diff --git a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 6c32ebd491edd..d631c7a11d10c 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -64,7 +64,7 @@ import static org.elasticsearch.core.Strings.format; -public class IndicesStore implements ClusterStateListener, Closeable { +public final class IndicesStore implements ClusterStateListener, Closeable { private static final Logger logger = LogManager.getLogger(IndicesStore.class); @@ -88,7 +88,6 @@ public class IndicesStore implements ClusterStateListener, Closeable { private final TimeValue deleteShardTimeout; - @SuppressWarnings("this-escape") @Inject public IndicesStore( Settings settings, diff --git a/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java 
b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java new file mode 100644 index 0000000000000..24bfef4ec3137 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.inference; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * This class defines an empty task settings object. This is useful for services that do not have any task settings. 
+ */ +public record EmptyTaskSettings() implements TaskSettings { + public static final String NAME = "empty_task_settings"; + + public static EmptyTaskSettings INSTANCE = new EmptyTaskSettings(); + + public EmptyTaskSettings(StreamInput in) { + this(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException {} +} diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 82ce13e591b6c..2d7ee9f210e64 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -8,6 +8,7 @@ package org.elasticsearch.inference; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import java.io.Closeable; @@ -76,4 +77,10 @@ public interface InferenceService extends Closeable { default boolean isInClusterService() { return false; } + + /** + * Defines the version required across all clusters to use this service + * @return {@link TransportVersion} specifying the version + */ + TransportVersion getMinimalSupportedVersion(); } diff --git a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java index a8ae380bd3ba1..cdccca7eb0c0e 100644 --- a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java +++ b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java @@ -33,6 +33,13 @@ public class ModelConfigurations 
implements ToXContentObject, VersionedNamedWrit private final ServiceSettings serviceSettings; private final TaskSettings taskSettings; + /** + * Allows no task settings to be defined. This will default to the {@link EmptyTaskSettings} object. + */ + public ModelConfigurations(String modelId, TaskType taskType, String service, ServiceSettings serviceSettings) { + this(modelId, taskType, service, serviceSettings, EmptyTaskSettings.INSTANCE); + } + public ModelConfigurations( String modelId, TaskType taskType, @@ -40,11 +47,11 @@ public ModelConfigurations( ServiceSettings serviceSettings, TaskSettings taskSettings ) { - this.modelId = modelId; - this.taskType = taskType; - this.service = service; - this.serviceSettings = serviceSettings; - this.taskSettings = taskSettings; + this.modelId = Objects.requireNonNull(modelId); + this.taskType = Objects.requireNonNull(taskType); + this.service = Objects.requireNonNull(service); + this.serviceSettings = Objects.requireNonNull(serviceSettings); + this.taskSettings = Objects.requireNonNull(taskSettings); } public ModelConfigurations(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java index 6b3eb3ef52c97..186de504241e7 100644 --- a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -28,6 +28,7 @@ public final class Pipeline { public static final String VERSION_KEY = "version"; public static final String ON_FAILURE_KEY = "on_failure"; public static final String META_KEY = "_meta"; + public static final String DEPRECATED_KEY = "deprecated"; private final String id; @Nullable @@ -39,6 +40,8 @@ public final class Pipeline { private final CompoundProcessor compoundProcessor; private final IngestMetric metrics; private final LongSupplier relativeTimeProvider; + @Nullable + private final Boolean deprecated; public Pipeline( String id, @@ 
-47,7 +50,18 @@ public Pipeline( @Nullable Map metadata, CompoundProcessor compoundProcessor ) { - this(id, description, version, metadata, compoundProcessor, System::nanoTime); + this(id, description, version, metadata, compoundProcessor, null); + } + + public Pipeline( + String id, + @Nullable String description, + @Nullable Integer version, + @Nullable Map metadata, + CompoundProcessor compoundProcessor, + @Nullable Boolean deprecated + ) { + this(id, description, version, metadata, compoundProcessor, System::nanoTime, deprecated); } // package private for testing @@ -57,7 +71,8 @@ public Pipeline( @Nullable Integer version, @Nullable Map metadata, CompoundProcessor compoundProcessor, - LongSupplier relativeTimeProvider + LongSupplier relativeTimeProvider, + @Nullable Boolean deprecated ) { this.id = id; this.description = description; @@ -66,6 +81,7 @@ public Pipeline( this.version = version; this.metrics = new IngestMetric(); this.relativeTimeProvider = relativeTimeProvider; + this.deprecated = deprecated; } public static Pipeline create( @@ -77,6 +93,7 @@ public static Pipeline create( String description = ConfigurationUtils.readOptionalStringProperty(null, null, config, DESCRIPTION_KEY); Integer version = ConfigurationUtils.readIntProperty(null, null, config, VERSION_KEY, null); Map metadata = ConfigurationUtils.readOptionalMap(null, null, config, META_KEY); + Boolean deprecated = ConfigurationUtils.readOptionalBooleanProperty(null, null, config, DEPRECATED_KEY); List> processorConfigs = ConfigurationUtils.readList(null, null, config, PROCESSORS_KEY); List processors = ConfigurationUtils.readProcessorConfigs(processorConfigs, scriptService, processorFactories); List> onFailureProcessorConfigs = ConfigurationUtils.readOptionalList(null, null, config, ON_FAILURE_KEY); @@ -97,7 +114,7 @@ public static Pipeline create( throw new ElasticsearchParseException("pipeline [" + id + "] cannot have an empty on_failure option defined"); } CompoundProcessor 
compoundProcessor = new CompoundProcessor(false, processors, onFailureProcessors); - return new Pipeline(id, description, version, metadata, compoundProcessor); + return new Pipeline(id, description, version, metadata, compoundProcessor, deprecated); } /** @@ -185,4 +202,12 @@ public List flattenAllProcessors() { public IngestMetric getMetrics() { return metrics; } + + public Boolean getDeprecated() { + return deprecated; + } + + public boolean isDeprecated() { + return Boolean.TRUE.equals(deprecated); + } } diff --git a/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java b/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java index 5bd811962a4c4..992b3c14e49c2 100644 --- a/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java @@ -132,7 +132,8 @@ public void execute(IngestDocument ingestDocument, BiConsumer clusterService.state().nodesIfRecovered()); + actionModule.initRestHandlers(() -> clusterService.state().nodesIfRecovered(), f -> { + ClusterState state = clusterService.state(); + return state.clusterRecovered() && featureService.clusterHasFeature(state, f); + }); logger.info("initialized"); } diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 38d67efa734b8..ba7b4bb51d9c7 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -46,7 +46,7 @@ /** * Component that runs only on the master node and is responsible for assigning running tasks to nodes */ -public class PersistentTasksClusterService implements ClusterStateListener, Closeable { +public final class PersistentTasksClusterService implements ClusterStateListener, Closeable { public static 
final Setting CLUSTER_TASKS_ALLOCATION_RECHECK_INTERVAL_SETTING = Setting.timeSetting( "cluster.persistent_tasks.allocation.recheck_interval", @@ -65,7 +65,6 @@ public class PersistentTasksClusterService implements ClusterStateListener, Clos private final PeriodicRechecker periodicRechecker; private final AtomicBoolean reassigningTasks = new AtomicBoolean(false); - @SuppressWarnings("this-escape") public PersistentTasksClusterService( Settings settings, PersistentTasksExecutorRegistry registry, diff --git a/server/src/main/java/org/elasticsearch/persistent/decider/EnableAssignmentDecider.java b/server/src/main/java/org/elasticsearch/persistent/decider/EnableAssignmentDecider.java index ebabec42ef11b..ae600dfda39a9 100644 --- a/server/src/main/java/org/elasticsearch/persistent/decider/EnableAssignmentDecider.java +++ b/server/src/main/java/org/elasticsearch/persistent/decider/EnableAssignmentDecider.java @@ -28,7 +28,7 @@ * * @see Allocation */ -public class EnableAssignmentDecider { +public final class EnableAssignmentDecider { public static final Setting CLUSTER_TASKS_ALLOCATION_ENABLE_SETTING = new Setting<>( "cluster.persistent_tasks.allocation.enable", @@ -41,7 +41,6 @@ public class EnableAssignmentDecider { private volatile Allocation enableAssignment; - @SuppressWarnings("this-escape") public EnableAssignmentDecider(final Settings settings, final ClusterSettings clusterSettings) { this.enableAssignment = CLUSTER_TASKS_ALLOCATION_ENABLE_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer(CLUSTER_TASKS_ALLOCATION_ENABLE_SETTING, this::setEnableAssignment); diff --git a/server/src/main/java/org/elasticsearch/plugins/scanners/NamedComponentReader.java b/server/src/main/java/org/elasticsearch/plugins/scanners/NamedComponentReader.java index e007cac442f89..ed32bd245977a 100644 --- a/server/src/main/java/org/elasticsearch/plugins/scanners/NamedComponentReader.java +++ b/server/src/main/java/org/elasticsearch/plugins/scanners/NamedComponentReader.java @@ 
-35,7 +35,7 @@ */ public class NamedComponentReader { - private Logger logger = LogManager.getLogger(NamedComponentReader.class); + private static final Logger logger = LogManager.getLogger(NamedComponentReader.class); private static final String NAMED_COMPONENTS_FILE_NAME = "named_components.json"; /** * a registry of known classes marked or indirectly marked (extending marked class) with @Extensible diff --git a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java index b4dd0a2f37b39..c88bbcfa91b98 100644 --- a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java @@ -129,7 +129,7 @@ public void restoreShard( } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { return in.getShardSnapshotStatus(snapshotId, indexId, shardId); } diff --git a/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java b/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java index ad0f956a16643..6bd967d84c89b 100644 --- a/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java @@ -137,7 +137,7 @@ public void restoreShard( } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { throw createCreationException(); } diff --git a/server/src/main/java/org/elasticsearch/repositories/Repository.java b/server/src/main/java/org/elasticsearch/repositories/Repository.java index 
1fd01631818bc..5782dedf3cfbc 100644 --- a/server/src/main/java/org/elasticsearch/repositories/Repository.java +++ b/server/src/main/java/org/elasticsearch/repositories/Repository.java @@ -208,7 +208,7 @@ default RepositoryStats stats() { * Creates a snapshot of the shard referenced by the given {@link SnapshotShardContext}. *

* As snapshot process progresses, implementation of this method should update {@link IndexShardSnapshotStatus} object returned by - * {@link SnapshotShardContext#status()} and check its {@link IndexShardSnapshotStatus#isAborted()} to see if the snapshot process + * {@link SnapshotShardContext#status()} and call {@link IndexShardSnapshotStatus#ensureNotAborted()} to see if the snapshot process * should be aborted. * * @param snapshotShardContext snapshot shard context that must be completed via {@link SnapshotShardContext#onResponse} or @@ -244,7 +244,7 @@ void restoreShard( * @param shardId shard id * @return snapshot status */ - IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId); + IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId); /** * Check if this instances {@link Settings} can be changed to the provided updated settings without recreating the repository. diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 1c5ea5a2b0012..17ac4ef38f1b6 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -1229,6 +1229,22 @@ public int hashCode() { return Objects.hash(snapshotState, version, startTimeMillis, endTimeMillis, slmPolicy); } + @Override + public String toString() { + return "SnapshotDetails{" + + "snapshotState=" + + snapshotState + + ", version=" + + version + + ", startTimeMillis=" + + startTimeMillis + + ", endTimeMillis=" + + endTimeMillis + + ", slmPolicy='" + + slmPolicy + + "'}"; + } + public static SnapshotDetails fromSnapshotInfo(SnapshotInfo snapshotInfo) { return new SnapshotDetails( snapshotInfo.state(), diff --git a/server/src/main/java/org/elasticsearch/repositories/SnapshotIndexCommit.java 
b/server/src/main/java/org/elasticsearch/repositories/SnapshotIndexCommit.java index 43594aa6047e8..b041f51afa6d2 100644 --- a/server/src/main/java/org/elasticsearch/repositories/SnapshotIndexCommit.java +++ b/server/src/main/java/org/elasticsearch/repositories/SnapshotIndexCommit.java @@ -19,13 +19,12 @@ * A (closeable) {@link IndexCommit} plus ref-counting to keep track of active users, and with the facility to drop the "main" initial ref * early if the shard snapshot is aborted. */ -public class SnapshotIndexCommit extends AbstractRefCounted { +public final class SnapshotIndexCommit extends AbstractRefCounted { private final Engine.IndexCommitRef commitRef; private final Runnable releaseInitialRef; private final SubscribableListener completionListeners = new SubscribableListener<>(); - @SuppressWarnings("this-escape") public SnapshotIndexCommit(Engine.IndexCommitRef commitRef) { this.commitRef = commitRef; this.releaseInitialRef = new RunOnce(this::decRef); diff --git a/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java b/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java index b9da0b1663c59..30f167d8c5cf6 100644 --- a/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java @@ -135,7 +135,7 @@ public void restoreShard( } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { throw createUnknownTypeException(); } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 743e978181f3a..4167717e09006 100644 --- 
a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -3040,10 +3040,7 @@ private void doSnapshotShard(SnapshotShardContext context) { } } for (String fileName : fileNames) { - if (snapshotStatus.isAborted()) { - logger.debug("[{}] [{}] Aborted on the file [{}], exiting", shardId, snapshotId, fileName); - throw new AbortedSnapshotException(); - } + ensureNotAborted(shardId, snapshotId, snapshotStatus, fileName); logger.trace("[{}] [{}] Processing [{}]", shardId, snapshotId, fileName); final StoreFileMetadata md = metadataFromStore.get(fileName); @@ -3245,6 +3242,16 @@ private void doSnapshotShard(SnapshotShardContext context) { } } + private static void ensureNotAborted(ShardId shardId, SnapshotId snapshotId, IndexShardSnapshotStatus snapshotStatus, String fileName) { + try { + snapshotStatus.ensureNotAborted(); + } catch (Exception e) { + logger.debug("[{}] [{}] {} on the file [{}], exiting", shardId, snapshotId, e.getMessage(), fileName); + assert e instanceof AbortedSnapshotException : e; + throw e; + } + } + protected void snapshotFiles( SnapshotShardContext context, BlockingQueue filesToSnapshot, @@ -3273,7 +3280,12 @@ private static boolean assertFileContentsMatchHash( store.decRef(); } } else { - assert snapshotStatus.isAborted() : "if the store is already closed we must have been aborted"; + try { + snapshotStatus.ensureNotAborted(); + assert false : "if the store is already closed we must have been aborted"; + } catch (Exception e) { + assert e instanceof AbortedSnapshotException : e; + } } return true; } @@ -3497,7 +3509,7 @@ public InputStream maybeRateLimitSnapshots(InputStream stream, RateLimitingInput } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId 
indexId, ShardId shardId) { BlobStoreIndexShardSnapshot snapshot = loadShardSnapshot(shardContainer(indexId, shardId), snapshotId); return IndexShardSnapshotStatus.newDone( snapshot.startTime(), @@ -3506,8 +3518,8 @@ public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, In snapshot.totalFileCount(), snapshot.incrementalSize(), snapshot.totalSize(), - null - ); // Not adding a real generation here as it doesn't matter to callers + null // Not adding a real generation here as it doesn't matter to callers + ); } @Override @@ -3712,10 +3724,7 @@ public int read(byte[] b, int off, int len) throws IOException { } private void checkAborted() { - if (snapshotStatus.isAborted()) { - logger.debug("[{}] [{}] Aborted on the file [{}], exiting", shardId, snapshotId, fileInfo.physicalName()); - throw new AbortedSnapshotException(); - } + ensureNotAborted(shardId, snapshotId, snapshotStatus, fileInfo.physicalName()); } }; final String partName = fileInfo.partName(i); diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java new file mode 100644 index 0000000000000..73b788d63b2ab --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.rest; + +import org.elasticsearch.Version; +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction; + +import java.util.Map; + +public class RestFeatures implements FeatureSpecification { + @Override + public Map getHistoricalFeatures() { + return Map.of(RestClusterGetSettingsAction.SUPPORTS_GET_SETTINGS_ACTION, Version.V_8_3_0); + } +} diff --git a/server/src/main/java/org/elasticsearch/rest/RestResponse.java b/server/src/main/java/org/elasticsearch/rest/RestResponse.java index 73b24b21e5462..55adc67bf18e6 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/RestResponse.java @@ -36,7 +36,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.rest.RestController.ELASTIC_PRODUCT_HTTP_HEADER; -public class RestResponse { +public final class RestResponse { public static final String TEXT_CONTENT_TYPE = "text/plain; charset=UTF-8"; @@ -111,7 +111,6 @@ public RestResponse(RestChannel channel, Exception e) throws IOException { this(channel, ExceptionsHelper.status(e), e); } - @SuppressWarnings("this-escape") public RestResponse(RestChannel channel, RestStatus status, Exception e) throws IOException { this.status = status; ToXContent.Params params = channel.request(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java index 189bd9c2b9551..7748944306e35 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java @@ -8,16 +8,15 @@ package 
org.elasticsearch.rest.action.admin.cluster; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; import org.elasticsearch.action.admin.cluster.settings.RestClusterGetSettingsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -27,28 +26,30 @@ import java.io.IOException; import java.util.List; import java.util.Set; -import java.util.function.Supplier; +import java.util.function.Predicate; import static org.elasticsearch.rest.RestRequest.Method.GET; @ServerlessScope(Scope.INTERNAL) public class RestClusterGetSettingsAction extends BaseRestHandler { + public static final NodeFeature SUPPORTS_GET_SETTINGS_ACTION = new NodeFeature("rest.get_settings_action"); + private final Settings settings; private final ClusterSettings clusterSettings; private final SettingsFilter settingsFilter; - private final Supplier nodesInCluster; + private final Predicate clusterSupportsFeature; public RestClusterGetSettingsAction( Settings settings, ClusterSettings clusterSettings, SettingsFilter settingsFilter, - Supplier nodesInCluster + Predicate clusterSupportsFeature ) { this.settings = settings; this.clusterSettings = clusterSettings; this.settingsFilter = settingsFilter; - this.nodesInCluster = nodesInCluster; + this.clusterSupportsFeature = clusterSupportsFeature; } @Override @@ -70,7 +71,7 @@ private static void setUpRequestParams(MasterNodeReadRequest clusterRequest, 
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); - if (nodesInCluster.get().getMinNodeVersion().before(Version.V_8_3_0)) { + if (clusterSupportsFeature.test(SUPPORTS_GET_SETTINGS_ACTION) == false) { return prepareLegacyRequest(request, client, renderDefaults); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/AliasesNotFoundException.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/AliasesNotFoundException.java index 4ebe5350e055b..e46468205da61 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/AliasesNotFoundException.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/AliasesNotFoundException.java @@ -13,9 +13,8 @@ import java.io.IOException; import java.util.Arrays; -public class AliasesNotFoundException extends ResourceNotFoundException { +public final class AliasesNotFoundException extends ResourceNotFoundException { - @SuppressWarnings("this-escape") public AliasesNotFoundException(String... 
names) { super("aliases " + Arrays.toString(names) + " missing"); this.setResources("aliases", names); diff --git a/server/src/main/java/org/elasticsearch/script/field/WriteField.java b/server/src/main/java/org/elasticsearch/script/field/WriteField.java index 6a50434b4004a..a420c8c7c0858 100644 --- a/server/src/main/java/org/elasticsearch/script/field/WriteField.java +++ b/server/src/main/java/org/elasticsearch/script/field/WriteField.java @@ -23,16 +23,15 @@ import java.util.function.Predicate; import java.util.function.Supplier; -public class WriteField implements Field { - protected String path; - protected Supplier> rootSupplier; +public final class WriteField implements Field { + private String path; + private Supplier> rootSupplier; - protected Map container; - protected String leaf; + private Map container; + private String leaf; private static final Object MISSING = new Object(); - @SuppressWarnings("this-escape") public WriteField(String path, Supplier> rootSupplier) { this.path = path; this.rootSupplier = rootSupplier; @@ -501,7 +500,7 @@ public NestedDocument doc(int index) { * If there is a value that is not a List or a Map, {@throws IllegalStateException}. */ @SuppressWarnings("unchecked") - protected List> getDocsAsList() { + private List> getDocsAsList() { Object value = get(MISSING); if (value == MISSING) { return null; @@ -604,7 +603,7 @@ public void remove() { * Change the path and clear the existing resolution by setting {@link #leaf} and {@link #container} to null. * Caller needs to re-resolve after this call. */ - protected void setPath(String path) { + private void setPath(String path) { this.path = path; this.leaf = null; this.container = null; @@ -613,7 +612,7 @@ protected void setPath(String path) { /** * Get the path to a leaf or create it if one does not exist. 
*/ - protected void setLeaf() { + private void setLeaf() { if (leaf == null) { resolveDepthFlat(); } @@ -636,7 +635,7 @@ protected void setLeaf() { * {@link #container} and {@link #leaf} and non-null if resolved. */ @SuppressWarnings("unchecked") - protected void resolveDepthFlat() { + private void resolveDepthFlat() { container = rootSupplier.get(); int index = path.indexOf('.'); @@ -670,7 +669,7 @@ protected void resolveDepthFlat() { * @throws IllegalArgumentException if a non-leaf segment maps to a non-Map Object. */ @SuppressWarnings("unchecked") - protected void createDepth() { + private void createDepth() { container = rootSupplier.get(); String[] segments = path.split("\\."); @@ -692,7 +691,7 @@ protected void createDepth() { leaf = segments[segments.length - 1]; } - protected String typeName(Object value) { + private String typeName(Object value) { return value != null ? value.getClass().getName() : "null"; } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 111bec2c88509..8ba48563c8f55 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -1125,9 +1125,7 @@ private void registerQueryParsers(List plugins) { ); registerQuery(new QuerySpec<>(GeoShapeQueryBuilder.NAME, GeoShapeQueryBuilder::new, GeoShapeQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(KnnVectorQueryBuilder.NAME, KnnVectorQueryBuilder::new, parser -> { - throw new IllegalArgumentException("[knn] queries cannot be provided directly, use the [knn] body parameter instead"); - })); + registerQuery(new QuerySpec<>(KnnVectorQueryBuilder.NAME, KnnVectorQueryBuilder::new, KnnVectorQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(KnnScoreDocQueryBuilder.NAME, KnnScoreDocQueryBuilder::new, parser -> { throw new IllegalArgumentException("[score_doc] queries cannot be provided directly"); 
diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 6919cfdbc00b4..44a8f641fae91 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -1249,6 +1249,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc QueryBuilder query = source.query(); if (query != null) { InnerHitContextBuilder.extractInnerHits(query, innerHitBuilders); + searchExecutionContext.setAliasFilter(context.request().getAliasFilter().getQueryBuilder()); context.parsedQuery(searchExecutionContext.toQuery(query)); } if (source.postFilter() != null) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index 1d12b33390659..98c131213b3fe 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -274,7 +274,7 @@ public Aggregator[] createSubAggregators(Aggregator parent, CardinalityUpperBoun * A mutable collection of {@link AggregationBuilder}s and * {@link PipelineAggregationBuilder}s. */ - public static class Builder implements Writeable, ToXContentObject { + public static final class Builder implements Writeable, ToXContentObject { private final Set names = new HashSet<>(); // Using LinkedHashSets to preserve the order of insertion, that makes the results @@ -290,7 +290,6 @@ public Builder() {} /** * Read from a stream. 
*/ - @SuppressWarnings("this-escape") public Builder(StreamInput in) throws IOException { int factoriesSize = in.readVInt(); for (int i = 0; i < factoriesSize; i++) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java index fe7ff22ec2b3a..4efe8e4c1d5a1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java @@ -93,7 +93,7 @@ protected void metadataToXContent(XContentBuilder builder, Params params) throws * {@link Aggregator#buildAggregations} and {@link InternalAggregation#reduce}. */ public static class MultiBucketConsumer implements IntConsumer { - private final Logger logger = LogManager.getLogger(MultiBucketConsumer.class); + private static final Logger logger = LogManager.getLogger(MultiBucketConsumer.class); private final int limit; private final CircuitBreaker breaker; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index c9c30914da33a..dff95332d3f16 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -70,7 +70,7 @@ public final class CompositeAggregator extends BucketsAggregator implements SizedBucketAggregator { - private final Logger logger = LogManager.getLogger(CompositeAggregator.class); + private static final Logger logger = LogManager.getLogger(CompositeAggregator.class); private final int size; private final List sourceNames; private final int[] reverseMuls; diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java index d51f14a516bc1..e0edebd7e5201 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java @@ -26,7 +26,7 @@ import java.io.IOException; import java.util.Map; -public class GeoHashGridAggregationBuilder extends GeoGridAggregationBuilder { +public final class GeoHashGridAggregationBuilder extends GeoGridAggregationBuilder { public static final String NAME = "geohash_grid"; public static final int DEFAULT_PRECISION = 5; public static final int DEFAULT_MAX_NUM_CELLS = 10000; @@ -41,7 +41,6 @@ public class GeoHashGridAggregationBuilder extends GeoGridAggregationBuilder { GeoHashGridAggregationBuilder::new ); - @SuppressWarnings("this-escape") public GeoHashGridAggregationBuilder(String name) { super(name); precision(DEFAULT_PRECISION); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java index 76286fc1605a2..b7532bdcb4e5b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java @@ -25,7 +25,7 @@ import java.io.IOException; import java.util.Map; -public class GeoTileGridAggregationBuilder extends GeoGridAggregationBuilder { +public final class GeoTileGridAggregationBuilder extends GeoGridAggregationBuilder { public static final String NAME = "geotile_grid"; public static final int DEFAULT_PRECISION = 7; private static 
final int DEFAULT_MAX_NUM_CELLS = 10000; @@ -40,7 +40,6 @@ public class GeoTileGridAggregationBuilder extends GeoGridAggregationBuilder { GeoTileGridAggregationBuilder::new ); - @SuppressWarnings("this-escape") public GeoTileGridAggregationBuilder(String name) { super(name); precision(DEFAULT_PRECISION); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index 2062d7dd7394b..3beec89853b76 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -24,10 +24,9 @@ import java.io.IOException; import java.util.Map; -public class GlobalAggregator extends BucketsAggregator implements SingleBucketAggregator { +public final class GlobalAggregator extends BucketsAggregator implements SingleBucketAggregator { private final Weight weight; - @SuppressWarnings("this-escape") public GlobalAggregator(String name, AggregatorFactories subFactories, AggregationContext context, Map metadata) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index a26507413128c..e4650ad9fdddf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -47,13 +47,12 @@ * An aggregator of string values that hashes the strings on the fly rather * than up front like the {@link GlobalOrdinalsStringTermsAggregator}. 
*/ -public class MapStringTermsAggregator extends AbstractStringTermsAggregator { +public final class MapStringTermsAggregator extends AbstractStringTermsAggregator { private final CollectorSource collectorSource; private final ResultStrategy resultStrategy; private final BytesKeyedBucketOrds bucketOrds; private final IncludeExclude.StringFilter includeExclude; - @SuppressWarnings("this-escape") public MapStringTermsAggregator( String name, AggregatorFactories factories, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java index b0d60962300b7..96d81aad86c4a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java @@ -45,13 +45,12 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; -public class NumericTermsAggregator extends TermsAggregator { +public final class NumericTermsAggregator extends TermsAggregator { private final ResultStrategy resultStrategy; private final ValuesSource.Numeric valuesSource; private final LongKeyedBucketOrds bucketOrds; private final LongFilter longFilter; - @SuppressWarnings("this-escape") public NumericTermsAggregator( String name, AggregatorFactories factories, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java index 92ff5cfb09c08..b8402208673d4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java @@ -120,7 +120,7 @@ public int hashCode() { return 
Objects.hash(method); } - public static class TDigest extends PercentilesConfig { + public static final class TDigest extends PercentilesConfig { static final double DEFAULT_COMPRESSION = 100.0; private double compression; @@ -134,7 +134,6 @@ public TDigest(double compression) { this(compression, null); } - @SuppressWarnings("this-escape") public TDigest(double compression, TDigestExecutionHint executionHint) { super(PercentilesMethod.TDIGEST); this.executionHint = executionHint; @@ -281,7 +280,7 @@ public int hashCode() { } } - public static class Hdr extends PercentilesConfig { + public static final class Hdr extends PercentilesConfig { static final int DEFAULT_NUMBER_SIG_FIGS = 3; private int numberOfSignificantValueDigits; @@ -289,7 +288,6 @@ public Hdr() { this(DEFAULT_NUMBER_SIG_FIGS); } - @SuppressWarnings("this-escape") public Hdr(int numberOfSignificantValueDigits) { super(PercentilesMethod.HDR); setNumberOfSignificantValueDigits(numberOfSignificantValueDigits); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java index e0c11530541ef..ae14b4601b559 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java @@ -31,14 +31,13 @@ * This aggregator works in a multi-bucket mode, that is, when serves as a sub-aggregator, a single aggregator instance aggregates the * counts for all buckets owned by the parent aggregator) */ -public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue { +public final class ValueCountAggregator extends NumericMetricsAggregator.SingleValue { final ValuesSource valuesSource; // a count per bucket LongArray counts; - @SuppressWarnings("this-escape") public ValueCountAggregator( String name, ValuesSourceConfig valuesSourceConfig, 
diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index e72a0361e9dba..3269de91da606 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -26,7 +26,7 @@ import java.util.List; import java.util.Map; -public class DfsSearchResult extends SearchPhaseResult { +public final class DfsSearchResult extends SearchPhaseResult { private static final Term[] EMPTY_TERMS = new Term[0]; private static final TermStatistics[] EMPTY_TERM_STATS = new TermStatistics[0]; @@ -37,7 +37,6 @@ public class DfsSearchResult extends SearchPhaseResult { private int maxDoc; private SearchProfileDfsPhaseResult searchProfileDfsPhaseResult; - @SuppressWarnings("this-escape") public DfsSearchResult(StreamInput in) throws IOException { super(in); contextId = new ShardSearchContextId(in); @@ -70,7 +69,6 @@ public DfsSearchResult(StreamInput in) throws IOException { } } - @SuppressWarnings("this-escape") public DfsSearchResult(ShardSearchContextId contextId, SearchShardTarget shardTarget, ShardSearchRequest shardSearchRequest) { this.setSearchShardTarget(shardTarget); this.contextId = contextId; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 2aeb36d75de62..5a04404c2e38a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -46,12 +46,11 @@ * Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified * after reducing all of the matches returned by the query phase */ -public class FetchPhase { +public final class FetchPhase { private static final Logger LOGGER = LogManager.getLogger(FetchPhase.class); private final 
FetchSubPhase[] fetchSubPhases; - @SuppressWarnings("this-escape") public FetchPhase(List fetchSubPhases) { this.fetchSubPhases = fetchSubPhases.toArray(new FetchSubPhase[fetchSubPhases.size() + 1]); this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsPhase(this); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java index cace74f4189fb..7d371ac372774 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java @@ -44,7 +44,7 @@ * * @see org.elasticsearch.search.builder.SearchSourceBuilder#highlight() */ -public class HighlightBuilder extends AbstractHighlighterBuilder { +public final class HighlightBuilder extends AbstractHighlighterBuilder { /** default for whether to highlight fields based on the source even if stored separately */ public static final boolean DEFAULT_FORCE_SOURCE = false; /** default for whether a field should be highlighted only if a query matches that field */ @@ -124,7 +124,6 @@ public HighlightBuilder(HighlightBuilder template, QueryBuilder highlightQuery, /** * Read from a stream. */ - @SuppressWarnings("this-escape") public HighlightBuilder(StreamInput in) throws IOException { super(in); encoder(in.readOptionalString()); @@ -445,7 +444,7 @@ public HighlightBuilder rewrite(QueryRewriteContext ctx) throws IOException { } - public static class Field extends AbstractHighlighterBuilder { + public static final class Field extends AbstractHighlighterBuilder { static final NamedObjectParser PARSER; static { ObjectParser parser = new ObjectParser<>("highlight_field"); @@ -475,7 +474,6 @@ private Field(Field template, QueryBuilder builder) { /** * Read from a stream. 
*/ - @SuppressWarnings("this-escape") public Field(StreamInput in) throws IOException { super(in); name = in.readString(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java b/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java index bbd626e05d1c8..adf7b797bc5b6 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java @@ -16,7 +16,7 @@ import java.util.Objects; -public class LegacyReaderContext extends ReaderContext { +public final class LegacyReaderContext extends ReaderContext { private final ShardSearchRequest shardSearchRequest; private final ScrollContext scrollContext; private final Engine.Searcher searcher; @@ -24,7 +24,6 @@ public class LegacyReaderContext extends ReaderContext { private AggregatedDfs aggregatedDfs; private RescoreDocIds rescoreDocIds; - @SuppressWarnings("this-escape") public LegacyReaderContext( ShardSearchContextId id, IndexService indexService, diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 618de8c6f06f9..b8dc104c07316 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -61,7 +61,7 @@ /** * A sort builder to sort based on a document field. */ -public class FieldSortBuilder extends SortBuilder { +public final class FieldSortBuilder extends SortBuilder { public static final String NAME = "field_sort"; public static final ParseField MISSING = new ParseField("missing"); @@ -101,7 +101,6 @@ public class FieldSortBuilder extends SortBuilder { private String format; /** Copy constructor. 
*/ - @SuppressWarnings("this-escape") public FieldSortBuilder(FieldSortBuilder template) { this(template.fieldName); this.order(template.order()); diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index 88eaadcec5136..5d11563b5d8ed 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -29,7 +29,7 @@ /** * A sort builder allowing to sort by score. */ -public class ScoreSortBuilder extends SortBuilder { +public final class ScoreSortBuilder extends SortBuilder { public static final String NAME = "_score"; private static final SortFieldAndFormat SORT_SCORE = new SortFieldAndFormat( @@ -44,7 +44,6 @@ public class ScoreSortBuilder extends SortBuilder { /** * Build a ScoreSortBuilder default to descending sort order. */ - @SuppressWarnings("this-escape") public ScoreSortBuilder() { // order defaults to desc when sorting on the _score order(SortOrder.DESC); @@ -53,7 +52,6 @@ public ScoreSortBuilder() { /** * Read from a stream. */ - @SuppressWarnings("this-escape") public ScoreSortBuilder(StreamInput in) throws IOException { order(SortOrder.readFromStream(in)); } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java index 657fc5a898b9d..f126091c785d8 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -44,7 +44,7 @@ /** * Top level suggest result, containing the result for each suggestion. 
*/ -public class Suggest implements Iterable>>, Writeable, ToXContentFragment { +public final class Suggest implements Iterable>>, Writeable, ToXContentFragment { public static final String NAME = "suggest"; @@ -61,7 +61,6 @@ public class Suggest implements Iterable>> suggestMap; - @SuppressWarnings("this-escape") public Suggest(List>> suggestions) { // we sort suggestions by their names to ensure iteration over suggestions are consistent // this is needed as we need to fill in suggestion docs in SearchPhaseController#sortDocs diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java index 6c261c040266b..3571d77d7376e 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java @@ -28,7 +28,11 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -36,48 +40,73 @@ import java.util.List; import java.util.Objects; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * A query that performs kNN search using Lucene's {@link org.apache.lucene.search.KnnFloatVectorQuery} or * {@link org.apache.lucene.search.KnnByteVectorQuery}. * - * NOTE: this is an internal class and should not be used outside of core Elasticsearch code. 
*/ public class KnnVectorQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "knn"; + private static final int NUM_CANDS_LIMIT = 10000; + public static final ParseField FIELD_FIELD = new ParseField("field"); + public static final ParseField NUM_CANDS_FIELD = new ParseField("num_candidates"); + public static final ParseField QUERY_VECTOR_FIELD = new ParseField("query_vector"); + public static final ParseField VECTOR_SIMILARITY_FIELD = new ParseField("similarity"); + public static final ParseField FILTER_FIELD = new ParseField("filter"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("knn", args -> { + List vector = (List) args[1]; + final float[] vectorArray; + if (vector != null) { + vectorArray = new float[vector.size()]; + for (int i = 0; i < vector.size(); i++) { + vectorArray[i] = vector.get(i); + } + } else { + vectorArray = null; + } + return new KnnVectorQueryBuilder((String) args[0], vectorArray, (int) args[2], (Float) args[3]); + }); + + static { + PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareFloatArray(constructorArg(), QUERY_VECTOR_FIELD); + // TODO: make num_candidates optional + PARSER.declareInt(constructorArg(), NUM_CANDS_FIELD); + PARSER.declareFloat(optionalConstructorArg(), VECTOR_SIMILARITY_FIELD); + PARSER.declareFieldArray( + KnnVectorQueryBuilder::addFilterQueries, + (p, c) -> AbstractQueryBuilder.parseTopLevelQuery(p), + FILTER_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY + ); + declareStandardFields(PARSER); + } + + public static KnnVectorQueryBuilder fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } private final String fieldName; private final float[] queryVector; - private final byte[] byteQueryVector; private final int numCands; - private final List filterQueries; + private final List filterQueries = new ArrayList<>(); private final Float vectorSimilarity; public 
KnnVectorQueryBuilder(String fieldName, float[] queryVector, int numCands, Float vectorSimilarity) { + if (numCands > NUM_CANDS_LIMIT) { + throw new IllegalArgumentException("[" + NUM_CANDS_FIELD.getPreferredName() + "] cannot exceed [" + NUM_CANDS_LIMIT + "]"); + } + if (queryVector == null) { + throw new IllegalArgumentException("[" + QUERY_VECTOR_FIELD.getPreferredName() + "] must be provided"); + } this.fieldName = fieldName; - this.queryVector = Objects.requireNonNull(queryVector); - this.byteQueryVector = null; - this.numCands = numCands; - this.filterQueries = new ArrayList<>(); - this.vectorSimilarity = vectorSimilarity; - } - - public KnnVectorQueryBuilder(String fieldName, byte[] queryVector, int numCands, Float vectorSimilarity) { - this.fieldName = fieldName; - this.queryVector = null; - this.byteQueryVector = Objects.requireNonNull(queryVector); - this.numCands = numCands; - this.filterQueries = new ArrayList<>(); - this.vectorSimilarity = vectorSimilarity; - } - - // Tests only - KnnVectorQueryBuilder(String fieldName, byte[] queryVector, float[] floatQueryVector, int numCands, Float vectorSimilarity) { - assert queryVector != null ^ floatQueryVector != null; - this.fieldName = fieldName; - this.queryVector = floatQueryVector; - this.byteQueryVector = queryVector; + this.queryVector = queryVector; this.numCands = numCands; - this.filterQueries = new ArrayList<>(); this.vectorSimilarity = vectorSimilarity; } @@ -85,17 +114,16 @@ public KnnVectorQueryBuilder(StreamInput in) throws IOException { super(in); this.fieldName = in.readString(); this.numCands = in.readVInt(); - if (in.getTransportVersion().before(TransportVersions.V_8_7_0)) { + if (in.getTransportVersion().before(TransportVersions.V_8_7_0) + || in.getTransportVersion().onOrAfter(TransportVersions.KNN_AS_QUERY_ADDED)) { this.queryVector = in.readFloatArray(); - this.byteQueryVector = null; } else { - this.queryVector = in.readBoolean() ? 
in.readFloatArray() : null; - this.byteQueryVector = in.readBoolean() ? in.readByteArray() : null; + in.readBoolean(); + this.queryVector = in.readFloatArray(); + in.readBoolean(); // used for byteQueryVector, which was always null } - if (in.getTransportVersion().before(TransportVersions.V_8_2_0)) { - this.filterQueries = new ArrayList<>(); - } else { - this.filterQueries = readQueries(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { + this.filterQueries.addAll(readQueries(in)); } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { this.vectorSimilarity = in.readOptionalFloat(); @@ -113,11 +141,6 @@ public float[] queryVector() { return queryVector; } - @Nullable - public byte[] getByteQueryVector() { - return byteQueryVector; - } - @Nullable public Float getVectorSimilarity() { return vectorSimilarity; @@ -147,28 +170,14 @@ public KnnVectorQueryBuilder addFilterQueries(List filterQueries) protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeVInt(numCands); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) { - boolean queryVectorNotNull = queryVector != null; - out.writeBoolean(queryVectorNotNull); - if (queryVectorNotNull) { - out.writeFloatArray(queryVector); - } - boolean byteVectorNotNull = byteQueryVector != null; - out.writeBoolean(byteVectorNotNull); - if (byteVectorNotNull) { - out.writeByteArray(byteQueryVector); - } + + if (out.getTransportVersion().before(TransportVersions.V_8_7_0) + || out.getTransportVersion().onOrAfter(TransportVersions.KNN_AS_QUERY_ADDED)) { + out.writeFloatArray(queryVector); } else { - final float[] f; - if (queryVector != null) { - f = queryVector; - } else { - f = new float[byteQueryVector.length]; - for (int i = 0; i < byteQueryVector.length; i++) { - f[i] = byteQueryVector[i]; - } - } - out.writeFloatArray(f); + out.writeBoolean(true); + out.writeFloatArray(queryVector); + out.writeBoolean(false); // used for 
byteQueryVector, which was always null } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { writeQueries(out, filterQueries); @@ -180,21 +189,21 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME) - .field("field", fieldName) - .field("vector", queryVector != null ? queryVector : byteQueryVector) - .field("num_candidates", numCands); + builder.startObject(NAME); + builder.field(FIELD_FIELD.getPreferredName(), fieldName); + builder.field(QUERY_VECTOR_FIELD.getPreferredName(), queryVector); + builder.field(NUM_CANDS_FIELD.getPreferredName(), numCands); if (vectorSimilarity != null) { - builder.field("similarity", vectorSimilarity); + builder.field(VECTOR_SIMILARITY_FIELD.getPreferredName(), vectorSimilarity); } if (filterQueries.isEmpty() == false) { - builder.startArray("filters"); + builder.startArray(FILTER_FIELD.getPreferredName()); for (QueryBuilder filterQuery : filterQueries) { filterQuery.toXContent(builder, params); } builder.endArray(); } - + boostAndQueryNameToXContent(builder); builder.endObject(); } @@ -204,11 +213,11 @@ public String getWriteableName() { } @Override - protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext ctx) throws IOException { boolean changed = false; List rewrittenQueries = new ArrayList<>(filterQueries.size()); for (QueryBuilder query : filterQueries) { - QueryBuilder rewrittenQuery = query.rewrite(queryRewriteContext); + QueryBuilder rewrittenQuery = query.rewrite(ctx); if (rewrittenQuery instanceof MatchNoneQueryBuilder) { return rewrittenQuery; } @@ -218,9 +227,9 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws rewrittenQueries.add(rewrittenQuery); } if (changed) { - return byteQueryVector != null - ? 
new KnnVectorQueryBuilder(fieldName, byteQueryVector, numCands, vectorSimilarity).addFilterQueries(rewrittenQueries) - : new KnnVectorQueryBuilder(fieldName, queryVector, numCands, vectorSimilarity).addFilterQueries(rewrittenQueries); + return new KnnVectorQueryBuilder(fieldName, queryVector, numCands, vectorSimilarity).boost(boost) + .queryName(queryName) + .addFilterQueries(rewrittenQueries); } return this; } @@ -238,59 +247,53 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { ); } - String parentPath = context.nestedLookup().getNestedParent(fieldName); final BitSetProducer parentFilter; BooleanQuery.Builder builder = new BooleanQuery.Builder(); for (QueryBuilder query : this.filterQueries) { builder.add(query.toQuery(context), BooleanClause.Occur.FILTER); } + if (context.getAliasFilter() != null) { + builder.add(context.getAliasFilter().toQuery(context), BooleanClause.Occur.FILTER); + } BooleanQuery booleanQuery = builder.build(); Query filterQuery = booleanQuery.clauses().isEmpty() ? null : booleanQuery; DenseVectorFieldType vectorFieldType = (DenseVectorFieldType) fieldType; + String parentPath = context.nestedLookup().getNestedParent(fieldName); if (parentPath != null) { - NestedObjectMapper mapper = context.nestedLookup().getNestedMappers().get(parentPath); - NestedObjectMapper objectMapper = context.nestedScope().getObjectMapper(); - if (objectMapper == null) { - parentFilter = context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())); + NestedObjectMapper originalObjectMapper = context.nestedScope().getObjectMapper(); + if (originalObjectMapper != null) { + try { + // we are in a nested context, to get the parent filter we need to go up one level + context.nestedScope().previousLevel(); + NestedObjectMapper objectMapper = context.nestedScope().getObjectMapper(); + parentFilter = objectMapper == null + ? 
context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())) + : context.bitsetFilter(objectMapper.nestedTypeFilter()); + } finally { + context.nestedScope().nextLevel(originalObjectMapper); + } } else { - parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter()); + // we are NOT in a nested context, coming from the top level knn search + parentFilter = context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())); } - try { - context.nestedScope().nextLevel(mapper); - if (filterQuery != null) { - filterQuery = new ToChildBlockJoinQuery(filterQuery, parentFilter); - } - return queryVector != null - ? vectorFieldType.createKnnQuery(queryVector, numCands, filterQuery, vectorSimilarity, parentFilter) - : vectorFieldType.createKnnQuery(byteQueryVector, numCands, filterQuery, vectorSimilarity, parentFilter); - } finally { - context.nestedScope().previousLevel(); + if (filterQuery != null) { + filterQuery = new ToChildBlockJoinQuery(filterQuery, parentFilter); } + return vectorFieldType.createKnnQuery(queryVector, numCands, filterQuery, vectorSimilarity, parentFilter); } - - return queryVector != null - ? 
vectorFieldType.createKnnQuery(queryVector, numCands, filterQuery, vectorSimilarity, null) - : vectorFieldType.createKnnQuery(byteQueryVector, numCands, filterQuery, vectorSimilarity, null); + return vectorFieldType.createKnnQuery(queryVector, numCands, filterQuery, vectorSimilarity, null); } @Override protected int doHashCode() { - return Objects.hash( - fieldName, - Arrays.hashCode(queryVector), - Arrays.hashCode(byteQueryVector), - numCands, - filterQueries, - vectorSimilarity - ); + return Objects.hash(fieldName, Arrays.hashCode(queryVector), numCands, filterQueries, vectorSimilarity); } @Override protected boolean doEquals(KnnVectorQueryBuilder other) { return Objects.equals(fieldName, other.fieldName) && Arrays.equals(queryVector, other.queryVector) - && Arrays.equals(byteQueryVector, other.byteQueryVector) && numCands == other.numCands && Objects.equals(filterQueries, other.filterQueries) && Objects.equals(vectorSimilarity, other.vectorSimilarity); diff --git a/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java b/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java index 8451396e0e590..da0b0d134b0f8 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java @@ -41,7 +41,7 @@ import static org.elasticsearch.core.Strings.format; -public class InternalSnapshotsInfoService implements ClusterStateListener, SnapshotsInfoService { +public final class InternalSnapshotsInfoService implements ClusterStateListener, SnapshotsInfoService { public static final Setting INTERNAL_SNAPSHOT_INFO_MAX_CONCURRENT_FETCHES_SETTING = Setting.intSetting( "cluster.snapshot.info.max_concurrent_fetches", @@ -84,7 +84,6 @@ public class InternalSnapshotsInfoService implements ClusterStateListener, Snaps private final Object mutex; - @SuppressWarnings("this-escape") public InternalSnapshotsInfoService( 
final Settings settings, final ClusterService clusterService, @@ -220,7 +219,7 @@ protected void doRun() throws Exception { snapshotShard.snapshot().getSnapshotId(), snapshotShard.index(), snapshotShard.shardId() - ).asCopy().getTotalSize(); + ).getTotalSize(); logger.debug("snapshot shard size for {}: {} bytes", snapshotShard, snapshotShardSize); diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index a0d4735a91cf4..a2a4c1bd444a5 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -136,7 +136,7 @@ * which removes {@link RestoreInProgress} when all shards are completed. In case of * restore failure a normal recovery fail-over process kicks in. */ -public class RestoreService implements ClusterStateApplier { +public final class RestoreService implements ClusterStateApplier { private static final Logger logger = LogManager.getLogger(RestoreService.class); @@ -190,7 +190,6 @@ public class RestoreService implements ClusterStateApplier { private volatile boolean refreshRepositoryUuidOnRestore; - @SuppressWarnings("this-escape") public RestoreService( ClusterService clusterService, RepositoriesService repositoriesService, diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 5765b0fc4b99a..134e76c57ed4c 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.Maps; import 
org.elasticsearch.common.util.concurrent.ThrottledTaskRunner; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; @@ -67,7 +68,7 @@ * starting and stopping shard level snapshots. * See package level documentation of {@link org.elasticsearch.snapshots} for details. */ -public class SnapshotShardsService extends AbstractLifecycleComponent implements ClusterStateListener, IndexEventListener { +public final class SnapshotShardsService extends AbstractLifecycleComponent implements ClusterStateListener, IndexEventListener { private static final Logger logger = LogManager.getLogger(SnapshotShardsService.class); private final ClusterService clusterService; @@ -88,7 +89,6 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements // Runs the tasks that promptly notify shards of aborted snapshots so that resources can be released ASAP private final ThrottledTaskRunner notifyOnAbortTaskRunner; - @SuppressWarnings("this-escape") public SnapshotShardsService( Settings settings, ClusterService clusterService, @@ -129,12 +129,15 @@ protected void doClose() { @Override public void clusterChanged(ClusterChangedEvent event) { try { - SnapshotsInProgress currentSnapshots = SnapshotsInProgress.get(event.state()); + final var currentSnapshots = SnapshotsInProgress.get(event.state()); if (SnapshotsInProgress.get(event.previousState()).equals(currentSnapshots) == false) { + final var localNodeId = clusterService.localNode().getId(); synchronized (shardSnapshots) { cancelRemoved(currentSnapshots); - for (List snapshots : currentSnapshots.entriesByRepo()) { - startNewSnapshots(snapshots); + for (final var oneRepoSnapshotsInProgress : currentSnapshots.entriesByRepo()) { + for (final var snapshotsInProgressEntry : oneRepoSnapshotsInProgress) { + handleUpdatedSnapshotsInProgressEntry(localNodeId, snapshotsInProgressEntry); + } } } } @@ -183,10 +186,18 @@ public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexSh * 
@param snapshot snapshot * @return map of shard id to snapshot status */ - public Map currentSnapshotShards(Snapshot snapshot) { + public Map currentSnapshotShards(Snapshot snapshot) { synchronized (shardSnapshots) { - final Map current = shardSnapshots.get(snapshot); - return current == null ? null : new HashMap<>(current); + final var current = shardSnapshots.get(snapshot); + if (current == null) { + return null; + } + + final Map result = Maps.newMapWithExpectedSize(current.size()); + for (final var entry : current.entrySet()) { + result.put(entry.getKey(), entry.getValue().asCopy()); + } + return result; } } @@ -212,54 +223,22 @@ private void cancelRemoved(SnapshotsInProgress snapshotsInProgress) { } } - private void startNewSnapshots(List snapshotsInProgress) { - final String localNodeId = clusterService.localNode().getId(); - for (SnapshotsInProgress.Entry entry : snapshotsInProgress) { - final State entryState = entry.state(); - if (entry.isClone()) { - // This is a snapshot clone, it will be executed on the current master - continue; - } - if (entryState == State.STARTED && entry.hasShardsInInitState()) { - Map startedShards = null; - final Snapshot snapshot = entry.snapshot(); - Map snapshotShards = shardSnapshots.getOrDefault(snapshot, emptyMap()); - for (Map.Entry shard : entry.shards().entrySet()) { - // Add all new shards to start processing on - final ShardId shardId = shard.getKey(); - final ShardSnapshotStatus shardSnapshotStatus = shard.getValue(); - if (shardSnapshotStatus.state() == ShardState.INIT - && localNodeId.equals(shardSnapshotStatus.nodeId()) - && snapshotShards.containsKey(shardId) == false) { - logger.trace("[{}] adding shard to the queue", shardId); - if (startedShards == null) { - startedShards = new HashMap<>(); - } - startedShards.put(shardId, IndexShardSnapshotStatus.newInitializing(shardSnapshotStatus.generation())); - } - } - if (startedShards != null && startedShards.isEmpty() == false) { - 
shardSnapshots.computeIfAbsent(snapshot, s -> new HashMap<>()).putAll(startedShards); - - final List shardSnapshotTasks = new ArrayList<>(startedShards.size()); - for (final Map.Entry shardEntry : startedShards.entrySet()) { - final ShardId shardId = shardEntry.getKey(); - final IndexShardSnapshotStatus snapshotStatus = shardEntry.getValue(); - final IndexId indexId = entry.indices().get(shardId.getIndexName()); - assert indexId != null; - assert SnapshotsService.useShardGenerations(entry.version()) - || ShardGenerations.fixShardGeneration(snapshotStatus.generation()) == null - : "Found non-null, non-numeric shard generation [" - + snapshotStatus.generation() - + "] for snapshot with old-format compatibility"; - shardSnapshotTasks.add( - newShardSnapshotTask(shardId, snapshot, indexId, snapshotStatus, entry.version(), entry.startTime()) - ); - } + private void handleUpdatedSnapshotsInProgressEntry(String localNodeId, SnapshotsInProgress.Entry entry) { + if (entry.isClone()) { + // This is a snapshot clone, it will be executed on the current master + return; + } - threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> shardSnapshotTasks.forEach(Runnable::run)); + switch (entry.state()) { + case STARTED -> { + if (entry.hasShardsInInitState() == false) { + // Snapshot is running but has no running shards yet, nothing to do + return; } - } else if (entryState == State.ABORTED) { + + startNewShardSnapshots(localNodeId, entry); + } + case ABORTED -> { // Abort all running shards for this snapshot final Snapshot snapshot = entry.snapshot(); Map snapshotShards = shardSnapshots.getOrDefault(snapshot, emptyMap()); @@ -277,9 +256,53 @@ private void startNewSnapshots(List snapshotsInProgre } } } + // otherwise snapshot is not running, nothing to do } } + private void startNewShardSnapshots(String localNodeId, SnapshotsInProgress.Entry entry) { + Map shardsToStart = null; + final Snapshot snapshot = entry.snapshot(); + final var runningShardsForSnapshot = 
shardSnapshots.getOrDefault(snapshot, emptyMap()).keySet(); + for (var scheduledShard : entry.shards().entrySet()) { + // Add all new shards to start processing on + final var shardId = scheduledShard.getKey(); + final var shardSnapshotStatus = scheduledShard.getValue(); + if (shardSnapshotStatus.state() == ShardState.INIT + && localNodeId.equals(shardSnapshotStatus.nodeId()) + && runningShardsForSnapshot.contains(shardId) == false) { + logger.trace("[{}] adding shard to the queue", shardId); + if (shardsToStart == null) { + shardsToStart = new HashMap<>(); + } + shardsToStart.put(shardId, shardSnapshotStatus.generation()); + } + } + if (shardsToStart == null) { + return; + } + assert shardsToStart.isEmpty() == false; + + final var newSnapshotShards = shardSnapshots.computeIfAbsent(snapshot, s -> new HashMap<>()); + + final List shardSnapshotTasks = new ArrayList<>(shardsToStart.size()); + for (final Map.Entry shardEntry : shardsToStart.entrySet()) { + final ShardId shardId = shardEntry.getKey(); + final IndexShardSnapshotStatus snapshotStatus = IndexShardSnapshotStatus.newInitializing(shardEntry.getValue()); + newSnapshotShards.put(shardId, snapshotStatus); + final IndexId indexId = entry.indices().get(shardId.getIndexName()); + assert indexId != null; + assert SnapshotsService.useShardGenerations(entry.version()) + || ShardGenerations.fixShardGeneration(snapshotStatus.generation()) == null + : "Found non-null, non-numeric shard generation [" + + snapshotStatus.generation() + + "] for snapshot with old-format compatibility"; + shardSnapshotTasks.add(newShardSnapshotTask(shardId, snapshot, indexId, snapshotStatus, entry.version(), entry.startTime())); + } + + threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> shardSnapshotTasks.forEach(Runnable::run)); + } + private Runnable newShardSnapshotTask( final ShardId shardId, final Snapshot snapshot, @@ -483,44 +506,50 @@ public static String getShardStateId(IndexShard indexShard, IndexCommit snapshot private 
void syncShardStatsOnNewMaster(List entries) { for (SnapshotsInProgress.Entry snapshot : entries) { if (snapshot.state() == State.STARTED || snapshot.state() == State.ABORTED) { - Map localShards = currentSnapshotShards(snapshot.snapshot()); - if (localShards != null) { - Map masterShards = snapshot.shards(); - for (Map.Entry localShard : localShards.entrySet()) { - ShardId shardId = localShard.getKey(); - ShardSnapshotStatus masterShard = masterShards.get(shardId); - if (masterShard != null && masterShard.state().completed() == false) { - final IndexShardSnapshotStatus.Copy indexShardSnapshotStatus = localShard.getValue().asCopy(); - final Stage stage = indexShardSnapshotStatus.getStage(); - // Master knows about the shard and thinks it has not completed - if (stage == Stage.DONE) { - // but we think the shard is done - we need to make new master know that the shard is done - logger.debug( - "[{}] new master thinks the shard [{}] is not completed but the shard is done locally, " - + "updating status on the master", - snapshot.snapshot(), - shardId - ); - notifySuccessfulSnapshotShard(snapshot.snapshot(), shardId, localShard.getValue().getShardSnapshotResult()); - - } else if (stage == Stage.FAILURE) { - // but we think the shard failed - we need to make new master know that the shard failed - logger.debug( - "[{}] new master thinks the shard [{}] is not completed but the shard failed locally, " - + "updating status on master", - snapshot.snapshot(), - shardId - ); - notifyFailedSnapshotShard( - snapshot.snapshot(), - shardId, - indexShardSnapshotStatus.getFailure(), - localShard.getValue().generation() - ); - } + final Map localShards; + synchronized (shardSnapshots) { + final var currentLocalShards = shardSnapshots.get(snapshot.snapshot()); + if (currentLocalShards == null) { + continue; + } + localShards = Map.copyOf(currentLocalShards); + } + Map masterShards = snapshot.shards(); + for (Map.Entry localShard : localShards.entrySet()) { + ShardId shardId = 
localShard.getKey(); + ShardSnapshotStatus masterShard = masterShards.get(shardId); + if (masterShard != null && masterShard.state().completed() == false) { + final IndexShardSnapshotStatus.Copy indexShardSnapshotStatus = localShard.getValue().asCopy(); + final Stage stage = indexShardSnapshotStatus.getStage(); + // Master knows about the shard and thinks it has not completed + if (stage == Stage.DONE) { + // but we think the shard is done - we need to make new master know that the shard is done + logger.debug( + "[{}] new master thinks the shard [{}] is not completed but the shard is done locally, " + + "updating status on the master", + snapshot.snapshot(), + shardId + ); + notifySuccessfulSnapshotShard(snapshot.snapshot(), shardId, localShard.getValue().getShardSnapshotResult()); + + } else if (stage == Stage.FAILURE) { + // but we think the shard failed - we need to make new master know that the shard failed + logger.debug( + "[{}] new master thinks the shard [{}] is not completed but the shard failed locally, " + + "updating status on master", + snapshot.snapshot(), + shardId + ); + notifyFailedSnapshotShard( + snapshot.snapshot(), + shardId, + indexShardSnapshotStatus.getFailure(), + localShard.getValue().generation() + ); } } } + } } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 3317efd7675b0..e6b140a3e70b8 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -130,7 +130,7 @@ * deletion. * See package level documentation of {@link org.elasticsearch.snapshots} for details. 
*/ -public class SnapshotsService extends AbstractLifecycleComponent implements ClusterStateApplier { +public final class SnapshotsService extends AbstractLifecycleComponent implements ClusterStateApplier { public static final IndexVersion SHARD_GEN_IN_REPO_DATA_VERSION = IndexVersions.V_7_6_0; @@ -164,7 +164,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus /** * Listeners for snapshot deletion keyed by delete uuid as returned from {@link SnapshotDeletionsInProgress.Entry#uuid()} */ - private final Map>> snapshotDeletionListeners = new HashMap<>(); + private final Map>> snapshotDeletionListeners = new ConcurrentHashMap<>(); // Set of repositories currently running either a snapshot finalization or a snapshot delete. private final Set currentlyFinalizing = Collections.synchronizedSet(new HashSet<>()); @@ -200,7 +200,6 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus private volatile int maxConcurrentOperations; - @SuppressWarnings("this-escape") public SnapshotsService( Settings settings, ClusterService clusterService, diff --git a/server/src/main/java/org/elasticsearch/transport/TransportKeepAlive.java b/server/src/main/java/org/elasticsearch/transport/TransportKeepAlive.java index 1caa83aa40488..a3bedd518de89 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportKeepAlive.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportKeepAlive.java @@ -48,7 +48,7 @@ final class TransportKeepAlive implements Closeable { } } - private final Logger logger = LogManager.getLogger(TransportKeepAlive.class); + private static final Logger logger = LogManager.getLogger(TransportKeepAlive.class); private final CounterMetric successfulPings = new CounterMetric(); private final CounterMetric failedPings = new CounterMetric(); private final ConcurrentMap pingIntervals = ConcurrentCollections.newConcurrentMap(); diff --git 
a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java index a67ca691eb8f2..728eae67f22cd 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java +++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; @@ -68,8 +67,6 @@ public class SystemIndexMigrator extends AllocatedPersistentTask { private static final Logger logger = LogManager.getLogger(SystemIndexMigrator.class); - private static final Version READY_FOR_MIGRATION_VERSION = Version.V_7_16_0; - // Fixed properties & services private final ParentTaskAssigningClient baseClient; private final ClusterService clusterService; @@ -421,11 +418,6 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer innerListener = ActionListener.wrap(listener::accept, this::markAsFailed); try { - Exception versionException = checkNodeVersionsReadyForMigration(clusterState); - if (versionException != null) { - markAsFailed(versionException); - return; - } createIndex(migrationInfo, ActionListener.wrap(shardsAcknowledgedResponse -> { logger.debug( "while migrating [{}] , got create index response: [{}]", @@ -602,20 +594,6 @@ public void markAsFailed(Exception e) { super.markAsFailed(e); } - private static Exception checkNodeVersionsReadyForMigration(ClusterState state) { - final Version minNodeVersion = state.nodes().getMinNodeVersion(); - if (minNodeVersion.before(READY_FOR_MIGRATION_VERSION)) { - return new IllegalStateException( - "all nodes must be on version [" - + 
READY_FOR_MIGRATION_VERSION - + "] or later to migrate feature indices but lowest node version currently in cluster is [" - + minNodeVersion - + "]" - ); - } - return null; - } - /** * Creates a task that will clear the results of previous migration attempts. * @param clusterService The cluster service. diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index cfec1fe8d5323..10f465eb8f3d8 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -8,3 +8,4 @@ org.elasticsearch.features.FeaturesSupportedSpecification org.elasticsearch.health.HealthFeature +org.elasticsearch.rest.RestFeatures diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 582f9b44af57b..4f72357f83325 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -124,7 +124,7 @@ public void testSetupRestHandlerContainsKnownBuiltin() { List.of(), RestExtension.allowAll() ); - actionModule.initRestHandlers(null); + actionModule.initRestHandlers(null, null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows( IllegalArgumentException.class, @@ -184,7 +184,7 @@ public String getName() { List.of(), RestExtension.allowAll() ); - Exception e = expectThrows(IllegalArgumentException.class, () -> actionModule.initRestHandlers(null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> actionModule.initRestHandlers(null, null)); assertThat(e.getMessage(), startsWith("Cannot replace existing handler 
for [/_nodes] for method: GET")); } finally { threadPool.shutdown(); @@ -237,7 +237,7 @@ public List getRestHandlers( List.of(), RestExtension.allowAll() ); - actionModule.initRestHandlers(null); + actionModule.initRestHandlers(null, null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows( IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java index 2154381d497c1..3378ff0063bb0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java @@ -65,6 +65,8 @@ private DesiredBalanceStats randomDesiredBalanceStats() { private ClusterBalanceStats randomClusterBalanceStats() { return new ClusterBalanceStats( + randomNonNegativeInt(), + randomNonNegativeInt(), randomBoolean() ? 
Map.of(DiscoveryNodeRole.DATA_CONTENT_NODE_ROLE.roleName(), randomTierBalanceStats()) : randomSubsetOf( @@ -81,21 +83,27 @@ private ClusterBalanceStats randomClusterBalanceStats() { private ClusterBalanceStats.TierBalanceStats randomTierBalanceStats() { return new ClusterBalanceStats.TierBalanceStats( - new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()), - new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()), - new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()), - new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()) + randomMetricStats(), + randomMetricStats(), + randomMetricStats(), + randomMetricStats(), + randomMetricStats() ); } + private ClusterBalanceStats.MetricStats randomMetricStats() { + return new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()); + } + private ClusterBalanceStats.NodeBalanceStats randomNodeBalanceStats() { return new ClusterBalanceStats.NodeBalanceStats( randomAlphaOfLength(10), List.of(randomFrom("data_content", "data_hot", "data_warm", "data_cold")), - randomIntBetween(0, Integer.MAX_VALUE), + randomNonNegativeInt(), + randomNonNegativeInt(), randomDouble(), - randomLongBetween(0, Long.MAX_VALUE), - randomLongBetween(0, Long.MAX_VALUE) + randomNonNegativeLong(), + randomNonNegativeLong() ); } @@ -203,8 +211,13 @@ public void testToXContent() throws IOException { // cluster balance stats Map clusterBalanceStats = (Map) json.get("cluster_balance_stats"); - assertThat(clusterBalanceStats.keySet(), containsInAnyOrder("tiers", "nodes")); + assertThat(clusterBalanceStats.keySet(), containsInAnyOrder("shard_count", "undesired_shard_allocation_count", "tiers", "nodes")); + assertEquals(clusterBalanceStats.get("shard_count"), 
response.getClusterBalanceStats().shards()); + assertEquals( + clusterBalanceStats.get("undesired_shard_allocation_count"), + response.getClusterBalanceStats().undesiredShardAllocations() + ); // tier balance stats Map tiers = (Map) clusterBalanceStats.get("tiers"); assertEquals(tiers.keySet(), response.getClusterBalanceStats().tiers().keySet()); @@ -212,7 +225,13 @@ public void testToXContent() throws IOException { Map tierStats = (Map) tiers.get(entry.getKey()); assertThat( tierStats.keySet(), - containsInAnyOrder("shard_count", "forecast_write_load", "forecast_disk_usage", "actual_disk_usage") + containsInAnyOrder( + "shard_count", + "undesired_shard_allocation_count", + "forecast_write_load", + "forecast_disk_usage", + "actual_disk_usage" + ) ); Map shardCountStats = (Map) tierStats.get("shard_count"); @@ -223,6 +242,16 @@ public void testToXContent() throws IOException { assertEquals(shardCountStats.get("max"), entry.getValue().shardCount().max()); assertEquals(shardCountStats.get("std_dev"), entry.getValue().shardCount().stdDev()); + Map undesiredShardAllocationCountStats = (Map) tierStats.get( + "undesired_shard_allocation_count" + ); + assertThat(undesiredShardAllocationCountStats.keySet(), containsInAnyOrder("total", "average", "min", "max", "std_dev")); + assertEquals(undesiredShardAllocationCountStats.get("total"), entry.getValue().undesiredShardAllocations().total()); + assertEquals(undesiredShardAllocationCountStats.get("average"), entry.getValue().undesiredShardAllocations().average()); + assertEquals(undesiredShardAllocationCountStats.get("min"), entry.getValue().undesiredShardAllocations().min()); + assertEquals(undesiredShardAllocationCountStats.get("max"), entry.getValue().undesiredShardAllocations().max()); + assertEquals(undesiredShardAllocationCountStats.get("std_dev"), entry.getValue().undesiredShardAllocations().stdDev()); + Map forecastWriteLoadStats = (Map) tierStats.get("forecast_write_load"); assertThat(forecastWriteLoadStats.keySet(), 
containsInAnyOrder("total", "average", "min", "max", "std_dev")); assertEquals(forecastWriteLoadStats.get("total"), entry.getValue().forecastWriteLoad().total()); @@ -258,6 +287,7 @@ public void testToXContent() throws IOException { "node_id", "roles", "shard_count", + "undesired_shard_allocation_count", "forecast_write_load", "forecast_disk_usage_bytes", "actual_disk_usage_bytes" @@ -266,6 +296,7 @@ public void testToXContent() throws IOException { assertEquals(nodesStats.get("node_id"), entry.getValue().nodeId()); assertEquals(nodesStats.get("roles"), entry.getValue().roles()); assertEquals(nodesStats.get("shard_count"), entry.getValue().shards()); + assertEquals(nodesStats.get("undesired_shard_allocation_count"), entry.getValue().undesiredShardAllocations()); assertEquals(nodesStats.get("forecast_write_load"), entry.getValue().forecastWriteLoad()); assertEquals(nodesStats.get("forecast_disk_usage_bytes"), entry.getValue().forecastShardSize()); assertEquals(nodesStats.get("actual_disk_usage_bytes"), entry.getValue().actualShardSize()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessor2Tests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessor2Tests.java index 351af06c5e95d..30810e85382ca 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessor2Tests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessor2Tests.java @@ -8,8 +8,6 @@ package org.elasticsearch.action.bulk; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; @@ -52,7 +50,6 @@ public class BulkProcessor2Tests extends ESTestCase { private ThreadPool threadPool; - private final Logger logger = LogManager.getLogger(BulkProcessor2Tests.class); @Before public void startThreadPool() { diff --git 
a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java index 09f79d1e48d0d..3e0ddb6390893 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.action.bulk; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; @@ -59,7 +57,6 @@ public class BulkProcessorTests extends ESTestCase { private ThreadPool threadPool; - private final Logger logger = LogManager.getLogger(BulkProcessorTests.class); @Before public void startThreadPool() { diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 588260d79d408..bf27d7fe73c4b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; @@ -354,6 +355,6 @@ private static List toTasks( } private static ActionListener createTestListener() { - return ActionListener.running(() -> { throw new AssertionError("task should not complete"); }); + return ActionTestUtils.assertNoFailureListener(t -> {}); } } diff --git 
a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java index 8aa10227dd66d..ea2bc79542e4a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.action.shard; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardEntry; @@ -408,6 +409,6 @@ private ClusterState executeTasks(final ClusterState state, final List ActionListener createTestListener() { - return ActionListener.running(() -> { throw new AssertionError("task should not complete"); }); + return ActionTestUtils.assertNoFailureListener(t -> {}); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java index 7a79dc44c29fd..89bd12021bf1b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java @@ -215,6 +215,7 @@ public void testApplyCommitEqualsHashCodeSerialization() { ); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101733") public void testJoinRequestEqualsHashCodeSerialization() { Join initialJoin = new Join( createNode(randomAlphaOfLength(10)), diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java index 
559c0a3628059..46f03aef76b90 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -71,9 +72,7 @@ public class NodeJoinExecutorTests extends ESTestCase { - private static final ActionListener NOT_COMPLETED_LISTENER = ActionListener.running(() -> { - throw new AssertionError("should not complete publication"); - }); + private static final ActionListener NOT_COMPLETED_LISTENER = ActionTestUtils.assertNoFailureListener(t -> {}); public void testPreventJoinClusterWithNewerIndices() { Settings.builder().build(); @@ -481,7 +480,10 @@ public void testRemovesOlderNodeInstancesWhenBecomingMaster() throws Exception { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER, + ActionListener.wrap( + r -> fail("Task should have failed"), + e -> assertThat(e.getMessage(), containsString("found existing node")) + ), executorTerm ) ) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java index f8a8c5db61ed4..8b9ef91923839 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java @@ -65,7 +65,17 @@ public static ComponentTemplate randomInstance() { return randomInstance(false); } + // Deprecated component templates may lead to deprecation warnings when used in non-deprecated index templates + // to avoid test 
failures due to unexpected deprecation warnings, returns a non-deprecated instance + public static ComponentTemplate randomNonDeprecatedInstance() { + return randomInstance(false, randomFrom(Boolean.FALSE, null)); + } + public static ComponentTemplate randomInstance(boolean lifecycleAllowed) { + return randomInstance(lifecycleAllowed, randomOptionalBoolean()); + } + + public static ComponentTemplate randomInstance(boolean lifecycleAllowed, Boolean deprecated) { Settings settings = null; CompressedXContent mappings = null; Map aliases = null; @@ -88,7 +98,7 @@ public static ComponentTemplate randomInstance(boolean lifecycleAllowed) { if (randomBoolean()) { meta = randomMeta(); } - return new ComponentTemplate(template, randomBoolean() ? null : randomNonNegativeLong(), meta); + return new ComponentTemplate(template, randomBoolean() ? null : randomNonNegativeLong(), meta, deprecated); } public static Map randomAliases() { @@ -136,7 +146,7 @@ protected ComponentTemplate mutateInstance(ComponentTemplate orig) { } public static ComponentTemplate mutateTemplate(ComponentTemplate orig) { - return switch (randomIntBetween(0, 2)) { + return switch (randomIntBetween(0, 3)) { case 0 -> { Template ot = orig.template(); yield switch (randomIntBetween(0, 3)) { @@ -148,7 +158,8 @@ yield switch (randomIntBetween(0, 3)) { ot.lifecycle() ), orig.version(), - orig.metadata() + orig.metadata(), + orig.deprecated() ); case 1 -> new ComponentTemplate( new Template( @@ -158,7 +169,8 @@ yield switch (randomIntBetween(0, 3)) { ot.lifecycle() ), orig.version(), - orig.metadata() + orig.metadata(), + orig.deprecated() ); case 2 -> new ComponentTemplate( new Template( @@ -168,7 +180,8 @@ yield switch (randomIntBetween(0, 3)) { ot.lifecycle() ), orig.version(), - orig.metadata() + orig.metadata(), + orig.deprecated() ); case 3 -> new ComponentTemplate( new Template( @@ -178,7 +191,8 @@ yield switch (randomIntBetween(0, 3)) { randomValueOtherThan(ot.lifecycle(), 
DataStreamLifecycleTests::randomLifecycle) ), orig.version(), - orig.metadata() + orig.metadata(), + orig.deprecated() ); default -> throw new IllegalStateException("illegal randomization branch"); }; @@ -186,12 +200,20 @@ yield switch (randomIntBetween(0, 3)) { case 1 -> new ComponentTemplate( orig.template(), randomValueOtherThan(orig.version(), ESTestCase::randomNonNegativeLong), - orig.metadata() + orig.metadata(), + orig.deprecated() ); case 2 -> new ComponentTemplate( orig.template(), orig.version(), - randomValueOtherThan(orig.metadata(), ComponentTemplateTests::randomMeta) + randomValueOtherThan(orig.metadata(), ComponentTemplateTests::randomMeta), + orig.deprecated() + ); + case 3 -> new ComponentTemplate( + orig.template(), + orig.version(), + orig.metadata(), + orig.isDeprecated() ? randomFrom(false, null) : true ); default -> throw new IllegalStateException("illegal randomization branch"); }; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java index 506e814ad1447..f617692710ebc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java @@ -92,8 +92,9 @@ public static ComposableIndexTemplate randomInstance() { randomBoolean() ? null : randomNonNegativeLong(), meta, dataStreamTemplate, - randomBoolean() ? 
null : randomBoolean(), - ignoreMissingComponentTemplates + randomOptionalBoolean(), + ignoreMissingComponentTemplates, + randomOptionalBoolean() ); } @@ -158,7 +159,7 @@ protected ComposableIndexTemplate mutateInstance(ComposableIndexTemplate orig) { } public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate orig) { - switch (randomIntBetween(0, 7)) { + switch (randomIntBetween(0, 8)) { case 0: List newIndexPatterns = randomValueOtherThan( orig.indexPatterns(), @@ -172,7 +173,9 @@ public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate ori orig.version(), orig.metadata(), orig.getDataStreamTemplate(), - null + null, + orig.getIgnoreMissingComponentTemplates(), + orig.deprecated() ); case 1: return new ComposableIndexTemplate( @@ -187,7 +190,8 @@ public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate ori orig.metadata(), orig.getDataStreamTemplate(), orig.getAllowAutoCreate(), - orig.getIgnoreMissingComponentTemplates() + orig.getIgnoreMissingComponentTemplates(), + orig.deprecated() ); case 2: List newComposedOf = randomValueOtherThan(orig.composedOf(), () -> randomList(0, 10, () -> randomAlphaOfLength(5))); @@ -200,7 +204,8 @@ public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate ori orig.metadata(), orig.getDataStreamTemplate(), orig.getAllowAutoCreate(), - orig.getIgnoreMissingComponentTemplates() + orig.getIgnoreMissingComponentTemplates(), + orig.deprecated() ); case 3: return new ComposableIndexTemplate( @@ -212,7 +217,8 @@ public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate ori orig.metadata(), orig.getDataStreamTemplate(), orig.getAllowAutoCreate(), - orig.getIgnoreMissingComponentTemplates() + orig.getIgnoreMissingComponentTemplates(), + orig.deprecated() ); case 4: return new ComposableIndexTemplate( @@ -224,7 +230,8 @@ public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate ori orig.metadata(), orig.getDataStreamTemplate(), 
orig.getAllowAutoCreate(), - orig.getIgnoreMissingComponentTemplates() + orig.getIgnoreMissingComponentTemplates(), + orig.deprecated() ); case 5: return new ComposableIndexTemplate( @@ -236,7 +243,8 @@ public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate ori randomValueOtherThan(orig.metadata(), ComposableIndexTemplateTests::randomMeta), orig.getDataStreamTemplate(), orig.getAllowAutoCreate(), - orig.getIgnoreMissingComponentTemplates() + orig.getIgnoreMissingComponentTemplates(), + orig.deprecated() ); case 6: return new ComposableIndexTemplate( @@ -248,7 +256,8 @@ public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate ori orig.metadata(), randomValueOtherThan(orig.getDataStreamTemplate(), ComposableIndexTemplateTests::randomDataStreamTemplate), orig.getAllowAutoCreate(), - orig.getIgnoreMissingComponentTemplates() + orig.getIgnoreMissingComponentTemplates(), + orig.deprecated() ); case 7: List ignoreMissingComponentTemplates = randomValueOtherThan( @@ -264,7 +273,21 @@ public static ComposableIndexTemplate mutateTemplate(ComposableIndexTemplate ori orig.metadata(), orig.getDataStreamTemplate(), orig.getAllowAutoCreate(), - ignoreMissingComponentTemplates + ignoreMissingComponentTemplates, + orig.deprecated() + ); + case 8: + return new ComposableIndexTemplate( + orig.indexPatterns(), + orig.template(), + orig.composedOf(), + orig.priority(), + orig.version(), + orig.metadata(), + orig.getDataStreamTemplate(), + orig.getAllowAutoCreate(), + orig.getIgnoreMissingComponentTemplates(), + orig.isDeprecated() ? 
randomFrom(false, null) : true ); default: throw new IllegalStateException("illegal randomization branch"); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java index 014834e98600a..24afb569a8167 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java @@ -1016,7 +1016,7 @@ public void testFindV2Templates() throws Exception { ClusterState state = ClusterState.EMPTY_STATE; assertNull(MetadataIndexTemplateService.findV2Template(state.metadata(), "index", randomBoolean())); - ComponentTemplate ct = ComponentTemplateTests.randomInstance(); + ComponentTemplate ct = ComponentTemplateTests.randomNonDeprecatedInstance(); state = service.addComponentTemplate(state, true, "ct", ct); ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("i*"), null, List.of("ct"), null, 1L, null, null, null); state = service.addIndexTemplateV2(state, true, "my-template", it); @@ -1033,7 +1033,7 @@ public void testFindV2TemplatesForHiddenIndex() throws Exception { ClusterState state = ClusterState.EMPTY_STATE; assertNull(MetadataIndexTemplateService.findV2Template(state.metadata(), "index", true)); - ComponentTemplate ct = ComponentTemplateTests.randomInstance(); + ComponentTemplate ct = ComponentTemplateTests.randomNonDeprecatedInstance(); state = service.addComponentTemplate(state, true, "ct", ct); ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("i*"), null, List.of("ct"), 0L, 1L, null, null, null); state = service.addIndexTemplateV2(state, true, "my-template", it); @@ -1051,7 +1051,7 @@ public void testFindV2TemplatesForDateMathIndex() throws Exception { ClusterState state = ClusterState.EMPTY_STATE; 
assertNull(MetadataIndexTemplateService.findV2Template(state.metadata(), indexName, true)); - ComponentTemplate ct = ComponentTemplateTests.randomInstance(); + ComponentTemplate ct = ComponentTemplateTests.randomNonDeprecatedInstance(); state = service.addComponentTemplate(state, true, "ct", ct); ComposableIndexTemplate it = new ComposableIndexTemplate(List.of("index-*"), null, List.of("ct"), 0L, 1L, null, null, null); state = service.addIndexTemplateV2(state, true, "my-template", it); @@ -2009,7 +2009,7 @@ public void testUpdateComponentTemplateFailsIfResolvedIndexTemplatesWouldBeInval public void testPutExistingComponentTemplateIsNoop() throws Exception { MetadataIndexTemplateService metadataIndexTemplateService = getMetadataIndexTemplateService(); ClusterState state = ClusterState.EMPTY_STATE; - ComponentTemplate componentTemplate = ComponentTemplateTests.randomInstance(); + ComponentTemplate componentTemplate = ComponentTemplateTests.randomNonDeprecatedInstance(); state = metadataIndexTemplateService.addComponentTemplate(state, false, "foo", componentTemplate); assertNotNull(state.metadata().componentTemplates().get("foo")); @@ -2597,6 +2597,30 @@ public void testComposableTemplateWithSubobjectsFalse() throws Exception { ); } + public void testAddIndexTemplateWithDeprecatedComponentTemplate() throws Exception { + ClusterState state = ClusterState.EMPTY_STATE; + final MetadataIndexTemplateService service = getMetadataIndexTemplateService(); + + ComponentTemplate ct = ComponentTemplateTests.randomInstance(false, true); + state = service.addComponentTemplate(state, true, "ct", ct); + + ComposableIndexTemplate it = new ComposableIndexTemplate( + List.of("test*"), + null, + List.of("ct"), + null, + 1L, + null, + null, + null, + null, + null + ); + service.addIndexTemplateV2(state, false, "foo", it); + + assertWarnings("index template [foo] uses deprecated component template [ct]"); + } + private static List putTemplate(NamedXContentRegistry xContentRegistry, 
PutRequest request) { ThreadPool testThreadPool = mock(ThreadPool.class); ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java index 6ce0f722fc0f5..d52678925c138 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -39,7 +37,6 @@ import static org.hamcrest.Matchers.not; public class AddIncrementallyTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(AddIncrementallyTests.class); public void testAddNodesAndIndices() { Settings.Builder settings = Settings.builder(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index c67671d5b240c..be8807292350b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterInfo; @@ -78,7 +76,6 @@ import static org.hamcrest.Matchers.nullValue; 
public class AllocationCommandsTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(AllocationCommandsTests.class); public void testMoveShardCommand() { AllocationService allocation = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index 02c25c1fd48ee..93949d5b5d590 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -54,8 +52,6 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(AwarenessAllocationTests.class); - public void testMoveShardOnceNewNodeWithAttributeAdded1() { AllocationService strategy = createAllocationService( Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index d8e1b58414484..0c54673be703d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -39,7 +37,6 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(BalanceConfigurationTests.class); // TODO maybe we can randomize these numbers somehow final int numberOfNodes = 25; final int numberOfIndices = 12; diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java index f0469c5289090..328777bfe28e7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -38,7 +36,6 @@ import static org.hamcrest.Matchers.not; public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(ClusterRebalanceRoutingTests.class); public void testAlways() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java index 10dcfd5f84548..2143f530e2cfc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; 
-import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +29,6 @@ import static org.hamcrest.Matchers.nullValue; public class ConcurrentRebalanceRoutingTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(ConcurrentRebalanceRoutingTests.class); public void testClusterConcurrentRebalance() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java index 2c0400c959aed..2bbc76f0b1bca 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -29,7 +27,6 @@ import static org.hamcrest.Matchers.not; public class ElectReplicaAsPrimaryDuringRelocationTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(ElectReplicaAsPrimaryDuringRelocationTests.class); public void testElectReplicaAsPrimaryDuringRelocation() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java index 
8aa00f07b8053..a99c77b0be3cb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterName; @@ -34,8 +32,6 @@ import static org.hamcrest.Matchers.not; public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(ExpectedShardSizeAllocationTests.class); - public void testInitializingHasExpectedSize() { final long byteSize = randomIntBetween(0, Integer.MAX_VALUE); final ClusterInfo clusterInfo = createClusterInfoWith(new ShardId("test", "_na_", 0), byteSize); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java index 43b378c88ccd7..d76e9912cef04 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest; @@ -55,7 +53,6 @@ import static org.hamcrest.Matchers.equalTo; public class FailedNodeRoutingTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(FailedNodeRoutingTests.class); public void 
testSimpleFailedNodeTest() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 74926a7556348..607d1feeaea55 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; @@ -49,7 +47,6 @@ import static org.hamcrest.Matchers.nullValue; public class FailedShardsRoutingTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(FailedShardsRoutingTests.class); public void testFailedShardPrimaryRelocatingToAndFrom() { AllocationService allocation = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/InSyncAllocationIdTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/InSyncAllocationIdTests.java index d3c49d23ce20a..d2390dfda788b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/InSyncAllocationIdTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/InSyncAllocationIdTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing.allocation; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; @@ -419,6 +420,6 @@ private ClusterState 
createOnePrimaryOneReplicaClusterState(AllocationService al } private static ActionListener createTestListener() { - return ActionListener.running(() -> { throw new AssertionError("task should not complete"); }); + return ActionTestUtils.assertNoFailureListener(t -> {}); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java index 28af4ba0a29ab..911769b0115dc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -42,7 +40,6 @@ import static org.hamcrest.Matchers.nullValue; public class IndexBalanceTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(IndexBalanceTests.class); public void testBalanceAllNodesStarted() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index c348cdfe43ecf..c622a187d09ae 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; @@ -78,8 +76,6 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(NodeVersionAllocationDeciderTests.class); - public void testDoNotAllocateFromPrimary() { AllocationService strategy = createAllocationService( Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java index a322e34583717..889b4552407ab 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +26,6 @@ import static org.hamcrest.Matchers.equalTo; public class PreferPrimaryAllocationTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(PreferPrimaryAllocationTests.class); public void testPreferPrimaryAllocationOverReplicas() { logger.info("create an allocation with 1 initial recoveries"); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java index 2456bbe41b903..3e18e9c68b6e4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java @@ -8,8 +8,6 @@ package 
org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +29,6 @@ import static org.hamcrest.Matchers.nullValue; public class PrimaryElectionRoutingTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(PrimaryElectionRoutingTests.class); public void testBackupElectionToPrimaryWhenPrimaryCanBeAllocatedToAnotherNode() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java index 35949ecb68103..3edc03da4cb59 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -29,8 +27,6 @@ import static org.hamcrest.Matchers.equalTo; public class PrimaryNotRelocatedWhileBeingRecoveredTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(PrimaryNotRelocatedWhileBeingRecoveredTests.class); - public void testPrimaryNotRelocatedWhileBeingRecoveredFrom() { AllocationService strategy = createAllocationService( Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java 
b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java index 4e6fe3f9aaee6..4471ed678b013 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterName; @@ -37,7 +35,6 @@ import static org.hamcrest.Matchers.nullValue; public class RebalanceAfterActiveTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(RebalanceAfterActiveTests.class); public void testRebalanceOnlyAfterAllShardsAreActive() { final long[] sizes = new long[5]; diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java index fe52e8331f957..2a109cf692ff8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +29,6 @@ import static org.hamcrest.Matchers.nullValue; public class ReplicaAllocatedAfterPrimaryTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(ReplicaAllocatedAfterPrimaryTests.class); public void 
testBackupIsAllocatedAfterPrimary() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java index e01ae60abc526..2ac230e1ff18f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -29,8 +27,6 @@ import static org.hamcrest.Matchers.equalTo; public class ShardVersioningTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(ShardVersioningTests.class); - public void testSimple() { AllocationService strategy = createAllocationService( Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java index 45e8fe4e525cd..b333e1cdf6fa9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java @@ -314,6 +314,98 @@ public void testShouldBeRedWhenThereAreUnassignedPrimariesAndUnassignedReplicas( ); } + public void testAllReplicasUnassigned() { + { + ClusterState clusterState = createClusterStateWith( + List.of( + index( + "myindex", + new ShardAllocation(randomNodeId(), AVAILABLE), + new 
ShardAllocation(randomNodeId(), AVAILABLE), + new ShardAllocation(randomNodeId(), AVAILABLE) + ) + ), + List.of() + ); + var service = createShardsAvailabilityIndicatorService(clusterState); + ShardAllocationStatus status = service.createNewStatus(clusterState.metadata()); + ShardsAvailabilityHealthIndicatorService.updateShardAllocationStatus( + status, + clusterState, + NodesShutdownMetadata.EMPTY, + randomBoolean() + ); + assertFalse(status.replicas.doAnyIndicesHaveAllUnavailable()); + } + { + ClusterState clusterState = createClusterStateWith( + List.of( + index( + "myindex", + new ShardAllocation(randomNodeId(), AVAILABLE), + new ShardAllocation(randomNodeId(), randomFrom(UNAVAILABLE, INITIALIZING)), + new ShardAllocation(randomNodeId(), AVAILABLE) + ) + ), + List.of() + ); + var service = createShardsAvailabilityIndicatorService(clusterState); + ShardAllocationStatus status = service.createNewStatus(clusterState.metadata()); + ShardsAvailabilityHealthIndicatorService.updateShardAllocationStatus( + status, + clusterState, + NodesShutdownMetadata.EMPTY, + randomBoolean() + ); + assertFalse(status.replicas.doAnyIndicesHaveAllUnavailable()); + } + { + ClusterState clusterState = createClusterStateWith( + List.of( + index( + "myindex", + new ShardAllocation(randomNodeId(), AVAILABLE), + new ShardAllocation(randomNodeId(), randomFrom(UNAVAILABLE, INITIALIZING)), + new ShardAllocation(randomNodeId(), randomFrom(UNAVAILABLE, INITIALIZING)) + ) + ), + List.of() + ); + var service = createShardsAvailabilityIndicatorService(clusterState); + ShardAllocationStatus status = service.createNewStatus(clusterState.metadata()); + ShardsAvailabilityHealthIndicatorService.updateShardAllocationStatus( + status, + clusterState, + NodesShutdownMetadata.EMPTY, + randomBoolean() + ); + assertTrue(status.replicas.doAnyIndicesHaveAllUnavailable()); + } + { + ClusterState clusterState = createClusterStateWith( + List.of( + indexWithTwoPrimaryOneReplicaShard( + "myindex", + new 
ShardAllocation(randomNodeId(), AVAILABLE), // Primary 1 + new ShardAllocation(randomNodeId(), AVAILABLE), // Replica 1 + new ShardAllocation(randomNodeId(), AVAILABLE), // Primary 2 + new ShardAllocation(randomNodeId(), randomFrom(UNAVAILABLE, INITIALIZING)) // Replica 2 + ) + ), + List.of() + ); + var service = createShardsAvailabilityIndicatorService(clusterState); + ShardAllocationStatus status = service.createNewStatus(clusterState.metadata()); + ShardsAvailabilityHealthIndicatorService.updateShardAllocationStatus( + status, + clusterState, + NodesShutdownMetadata.EMPTY, + randomBoolean() + ); + assertTrue(status.replicas.doAnyIndicesHaveAllUnavailable()); + } + } + public void testShouldBeRedWhenThereAreUnassignedPrimariesAndNoReplicas() { var clusterState = createClusterStateWith(List.of(index("red-index", new ShardAllocation(randomNodeId(), UNAVAILABLE))), List.of()); var service = createShardsAvailabilityIndicatorService(clusterState); @@ -1944,22 +2036,28 @@ private static IndexRoutingTable index(String name, ShardAllocation primaryState ); } - private static IndexRoutingTable frozenIndex(String name, ShardAllocation primaryState, String originalIndex) { - return index( - IndexMetadata.builder(name) - .settings( - Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_INDEX_NAME_SETTING_KEY, originalIndex) - .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE) - .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY, randomBoolean()) - .build() - ) - .numberOfShards(1) - .numberOfReplicas(0) - .build(), - primaryState - ); + private static IndexRoutingTable indexWithTwoPrimaryOneReplicaShard( + String name, + ShardAllocation primary1State, + ShardAllocation replica1State, + ShardAllocation primary2State, + ShardAllocation replica2State + ) { + var indexMetadata = IndexMetadata.builder(name) + 
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()).build()) + .numberOfShards(2) + .numberOfReplicas(1) + .build(); + var index = indexMetadata.getIndex(); + var shard1Id = new ShardId(index, 0); + var shard2Id = new ShardId(index, 1); + + var builder = IndexRoutingTable.builder(index); + builder.addShard(createShardRouting(shard1Id, true, primary1State)); + builder.addShard(createShardRouting(shard2Id, true, primary2State)); + builder.addShard(createShardRouting(shard1Id, false, replica1State)); + builder.addShard(createShardRouting(shard2Id, false, replica2State)); + return builder.build(); } private static IndexRoutingTable index(IndexMetadata indexMetadata, ShardAllocation primaryState, ShardAllocation... replicaStates) { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java index a52c768b009e8..4c68404efccb8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +29,6 @@ import static org.hamcrest.Matchers.equalTo; public class ShardsLimitAllocationTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(ShardsLimitAllocationTests.class); public void testIndexLevelShardsLimitAllocate() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java 
b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index 3f18d3ef89c44..d8deaff71b786 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -44,7 +42,6 @@ import static org.hamcrest.Matchers.nullValue; public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(SingleShardNoReplicasRoutingTests.class); public void testSingleIndexStartedShard() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java index 5ffcaa7cccd5a..62529a9efccb5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +29,6 @@ import static org.hamcrest.Matchers.nullValue; public class SingleShardOneReplicaRoutingTests extends ESAllocationTestCase { - private final Logger logger = 
LogManager.getLogger(SingleShardOneReplicaRoutingTests.class); public void testSingleIndexFirstStartPrimaryThenBackups() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java index a198b13cb4d01..bba1271a37be0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -34,7 +32,6 @@ import static org.hamcrest.Matchers.nullValue; public class TenShardsOneReplicaRoutingTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(TenShardsOneReplicaRoutingTests.class); public void testSingleIndexFirstStartPrimaryThenBackups() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index 86b462e989fa4..c200d2df90cd5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -59,7 +57,6 @@ import static org.hamcrest.Matchers.equalTo; public class ThrottlingAllocationTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(ThrottlingAllocationTests.class); public void testPrimaryRecoveryThrottling() { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java index 1a0d41b6f3ee2..ef671dab76b14 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +29,6 @@ import static org.hamcrest.Matchers.nullValue; public class UpdateNumberOfReplicasTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(UpdateNumberOfReplicasTests.class); public void testUpdateNumberOfReplicas() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStatsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStatsTests.java index 6f2866095c2e5..e4b6c0275150d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStatsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStatsTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.IndexVersion; import 
org.elasticsearch.index.shard.ShardId; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -58,25 +59,33 @@ public void testStatsForSingleTierClusterWithNoForecasts() { List.of(indexSizes("index-1", 1L, 1L), indexSizes("index-2", 2L, 2L), indexSizes("index-3", 3L, 3L)) ); - var stats = ClusterBalanceStats.createFrom(clusterState, clusterInfo, TEST_WRITE_LOAD_FORECASTER); + var stats = ClusterBalanceStats.createFrom( + clusterState, + createDesiredBalance(clusterState), + clusterInfo, + TEST_WRITE_LOAD_FORECASTER + ); assertThat( stats, equalTo( new ClusterBalanceStats( + 6, + 0, Map.of( DATA_CONTENT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new MetricStats(6.0, 2.0, 2.0, 2.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(12.0, 3.0, 5.0, 4.0, stdDev(3.0, 5.0, 4.0)), new MetricStats(12.0, 3.0, 5.0, 4.0, stdDev(3.0, 5.0, 4.0)) ) ), Map.ofEntries( - Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0.0, 4L, 4L)), - Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0.0, 3L, 3L)), - Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0.0, 5L, 5L)) + Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 0.0, 4L, 4L)), + Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 0.0, 3L, 3L)), + Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 0.0, 5L, 5L)) ) ) ) @@ -102,25 +111,33 @@ public void testStatsForSingleTierClusterWithForecasts() { List.of(indexSizes("index-1", 1L, 1L), indexSizes("index-2", 2L, 2L), indexSizes("index-3", 3L, 3L)) ); - var stats = ClusterBalanceStats.createFrom(clusterState, clusterInfo, TEST_WRITE_LOAD_FORECASTER); + var stats = 
ClusterBalanceStats.createFrom( + clusterState, + createDesiredBalance(clusterState), + clusterInfo, + TEST_WRITE_LOAD_FORECASTER + ); assertThat( stats, equalTo( new ClusterBalanceStats( + 6, + 0, Map.of( DATA_CONTENT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new MetricStats(6.0, 2.0, 2.0, 2.0, 0.0), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(12.0, 3.5, 4.5, 4.0, stdDev(3.5, 4.0, 4.5)), new MetricStats(36.0, 10.0, 14.0, 12.0, stdDev(10.0, 12.0, 14.0)), new MetricStats(12.0, 3.0, 5.0, 4.0, stdDev(3.0, 5.0, 4.0)) ) ), Map.ofEntries( - Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 3.5, 14L, 4L)), - Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 4.0, 12L, 3L)), - Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 4.5, 10L, 5L)) + Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 3.5, 14L, 4L)), + Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 4.0, 12L, 3L)), + Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 4.5, 10L, 5L)) ) ) ) @@ -157,7 +174,12 @@ public void testStatsForHotWarmClusterWithForecasts() { ) ); - var stats = ClusterBalanceStats.createFrom(clusterState, clusterInfo, TEST_WRITE_LOAD_FORECASTER); + var stats = ClusterBalanceStats.createFrom( + clusterState, + createDesiredBalance(clusterState), + clusterInfo, + TEST_WRITE_LOAD_FORECASTER + ); var hotRoleNames = List.of(DATA_CONTENT_NODE_ROLE.roleName(), DATA_HOT_NODE_ROLE.roleName()); var warmRoleNames = List.of(DATA_WARM_NODE_ROLE.roleName()); @@ -165,10 +187,13 @@ public void testStatsForHotWarmClusterWithForecasts() { stats, equalTo( new ClusterBalanceStats( + 10, + 0, Map.of( DATA_CONTENT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new 
MetricStats(7.0, 2.0, 3.0, 7.0 / 3, stdDev(3.0, 2.0, 2.0)), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(21.0, 6.0, 8.5, 7.0, stdDev(6.0, 8.5, 6.5)), new MetricStats(36.0, 10.0, 16.0, 12.0, stdDev(10.0, 10.0, 16.0)), new MetricStats(34.0, 9.0, 15.0, 34.0 / 3, stdDev(9.0, 10.0, 15.0)) @@ -176,6 +201,7 @@ public void testStatsForHotWarmClusterWithForecasts() { DATA_HOT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new MetricStats(7.0, 2.0, 3.0, 7.0 / 3, stdDev(3.0, 2.0, 2.0)), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(21.0, 6.0, 8.5, 7.0, stdDev(6.0, 8.5, 6.5)), new MetricStats(36.0, 10.0, 16.0, 12.0, stdDev(10.0, 10.0, 16.0)), new MetricStats(34.0, 9.0, 15.0, 34.0 / 3, stdDev(9.0, 10.0, 15.0)) @@ -184,17 +210,18 @@ public void testStatsForHotWarmClusterWithForecasts() { new ClusterBalanceStats.TierBalanceStats( new MetricStats(3.0, 1.0, 1.0, 1.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(42.0, 12.0, 18.0, 14.0, stdDev(12.0, 12.0, 18.0)), new MetricStats(42.0, 12.0, 18.0, 14.0, stdDev(12.0, 12.0, 18.0)) ) ), Map.ofEntries( - Map.entry("node-hot-1", new NodeBalanceStats("node-hot-1", hotRoleNames, 3, 8.5, 16L, 15L)), - Map.entry("node-hot-2", new NodeBalanceStats("node-hot-2", hotRoleNames, 2, 6.0, 10L, 9L)), - Map.entry("node-hot-3", new NodeBalanceStats("node-hot-3", hotRoleNames, 2, 6.5, 10L, 10L)), - Map.entry("node-warm-1", new NodeBalanceStats("node-warm-1", warmRoleNames, 1, 0.0, 12L, 12L)), - Map.entry("node-warm-2", new NodeBalanceStats("node-warm-2", warmRoleNames, 1, 0.0, 12L, 12L)), - Map.entry("node-warm-3", new NodeBalanceStats("node-warm-3", warmRoleNames, 1, 0.0, 18L, 18L)) + Map.entry("node-hot-1", new NodeBalanceStats("node-hot-1", hotRoleNames, 3, 0, 8.5, 16L, 15L)), + Map.entry("node-hot-2", new NodeBalanceStats("node-hot-2", hotRoleNames, 2, 0, 6.0, 10L, 9L)), + Map.entry("node-hot-3", new NodeBalanceStats("node-hot-3", 
hotRoleNames, 2, 0, 6.5, 10L, 10L)), + Map.entry("node-warm-1", new NodeBalanceStats("node-warm-1", warmRoleNames, 1, 0, 0.0, 12L, 12L)), + Map.entry("node-warm-2", new NodeBalanceStats("node-warm-2", warmRoleNames, 1, 0, 0.0, 12L, 12L)), + Map.entry("node-warm-3", new NodeBalanceStats("node-warm-3", warmRoleNames, 1, 0, 0.0, 18L, 18L)) ) ) ) @@ -213,25 +240,28 @@ public void testStatsForNoIndicesInTier() { ); var clusterInfo = createClusterInfo(List.of()); - var stats = ClusterBalanceStats.createFrom(clusterState, clusterInfo, TEST_WRITE_LOAD_FORECASTER); + var stats = ClusterBalanceStats.createFrom(clusterState, null, clusterInfo, TEST_WRITE_LOAD_FORECASTER); assertThat( stats, equalTo( new ClusterBalanceStats( + 0, + 0, Map.of( DATA_CONTENT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0) ) ), Map.ofEntries( - Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0.0, 0L, 0L)), - Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0.0, 0L, 0L)), - Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0.0, 0L, 0L)) + Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0, 0.0, 0L, 0L)), + Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0, 0.0, 0L, 0L)), + Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0, 0.0, 0L, 0L)) ) ) ) @@ -269,6 +299,20 @@ private static ClusterState createClusterState(List nodes, List(); + for (var indexRoutingTable : state.getRoutingTable()) { + for (int i = 0; i < indexRoutingTable.size(); i++) { + var indexShardRoutingTable = 
indexRoutingTable.shard(i); + assignments.put( + indexShardRoutingTable.shardId(), + new ShardAssignment(Set.of(indexShardRoutingTable.primaryShard().currentNodeId()), 1, 0, 0) + ); + } + } + return new DesiredBalance(1, assignments); + } + private static Tuple startedIndex( String indexName, @Nullable Double indexWriteLoadForecast, diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java index 0edc8c74ab1db..b4cc0709320dd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java @@ -10,8 +10,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -45,8 +43,6 @@ public class EnableAllocationTests extends ESAllocationTestCase { - private final Logger logger = LogManager.getLogger(EnableAllocationTests.class); - public void testClusterEnableNone() { AllocationService strategy = createAllocationService( Settings.builder().put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name()).build() diff --git a/server/src/test/java/org/elasticsearch/health/RestGetHealthActionTests.java b/server/src/test/java/org/elasticsearch/health/RestGetHealthActionTests.java new file mode 100644 index 0000000000000..0eeb1811849e9 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/health/RestGetHealthActionTests.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class RestGetHealthActionTests extends ESTestCase { + + public void testHealthReportAPIDoesNotTripCircuitBreakers() { + assertThat(new RestGetHealthAction().canTripCircuitBreaker(), is(false)); + } +} diff --git a/server/src/test/java/org/elasticsearch/health/node/action/TransportHealthNodeActionTests.java b/server/src/test/java/org/elasticsearch/health/node/action/TransportHealthNodeActionTests.java index 0781cf6614dac..a228f0e4792a9 100644 --- a/server/src/test/java/org/elasticsearch/health/node/action/TransportHealthNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/action/TransportHealthNodeActionTests.java @@ -52,6 +52,7 @@ import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; public class TransportHealthNodeActionTests extends ESTestCase { private static ThreadPool threadPool; @@ -250,6 +251,7 @@ protected void healthOperation(Task task, Request request, ClusterState state, A } }, null, request, listener); assertTrue(listener.isDone()); + assertThat(transportService.getRequestHandler("internal:testAction").canTripCircuitBreaker(), is(false)); if (healthOperationFailure) { try { @@ -283,6 +285,7 @@ public void testDelegateToHealthNodeWithoutParentTask() throws ExecutionExceptio PlainActionFuture listener = new PlainActionFuture<>(); ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), null, request, listener); + 
assertThat(transportService.getRequestHandler("internal:testAction").canTripCircuitBreaker(), is(false)); assertThat(transport.capturedRequests().length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = transport.capturedRequests()[0]; @@ -303,6 +306,7 @@ public void testDelegateToHealthNodeWithParentTask() throws ExecutionException, PlainActionFuture listener = new PlainActionFuture<>(); final CancellableTask task = (CancellableTask) taskManager.register("type", "internal:testAction", request); ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), task, request, listener); + assertThat(transportService.getRequestHandler("internal:testAction").canTripCircuitBreaker(), is(false)); assertThat(transport.capturedRequests().length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = transport.capturedRequests()[0]; @@ -327,6 +331,8 @@ public void testHealthNodeOperationWithException() throws InterruptedException { listener ); assertTrue(listener.isDone()); + assertThat(transportService.getRequestHandler("internal:testAction").canTripCircuitBreaker(), is(false)); + try { listener.get(); fail("A simulated RuntimeException should be thrown"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index 3de2b0a5d19a1..559eb4712d7c1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -13,9 +13,8 @@ import java.util.Collections; -public class ObjectMapperMergeTests extends ESTestCase { +public final class ObjectMapperMergeTests extends ESTestCase { - @SuppressWarnings("this-escape") private final RootObjectMapper rootObjectMapper = createMapping(false, true, true, false); private RootObjectMapper createMapping( diff --git 
a/server/src/test/java/org/elasticsearch/indices/analysis/lucene/SkipStartingWithDigitTokenFilter.java b/server/src/test/java/org/elasticsearch/indices/analysis/lucene/SkipStartingWithDigitTokenFilter.java index 5f6ebc09f4c0e..93c221a924e0b 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/lucene/SkipStartingWithDigitTokenFilter.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/lucene/SkipStartingWithDigitTokenFilter.java @@ -14,9 +14,8 @@ import java.io.IOException; -public class SkipStartingWithDigitTokenFilter extends FilteringTokenFilter { +public final class SkipStartingWithDigitTokenFilter extends FilteringTokenFilter { - @SuppressWarnings("this-escape") private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); private final long asciiDigitsToSkip; diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index c81ea1579828f..6f57707cd9e78 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.PlainActionFuture; @@ -413,9 +414,7 @@ public ClusterState addNode(ClusterState clusterState, DiscoveryNode discoveryNo new CompatibilityVersions(transportVersion, Map.of()), Set.of(), DUMMY_REASON, - ActionListener.running(() -> { - throw new AssertionError("should not complete publication"); - }), + 
createTestListener(), clusterState.term() ) ) @@ -435,9 +434,7 @@ public ClusterState joinNodesAndBecomeMaster(ClusterState clusterState, List { - throw new AssertionError("should not complete publication"); - }) + createTestListener() ) ), clusterState.term() + between(1, 10) @@ -552,7 +549,7 @@ private , Response extends ActionResp } } - private ActionListener createTestListener() { - return ActionListener.running(() -> { throw new AssertionError("task should not complete"); }); + private static ActionListener createTestListener() { + return ActionTestUtils.assertNoFailureListener(t -> {}); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 3b114cf0a618e..6243131141497 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.internal.Client; @@ -2664,7 +2665,7 @@ private IngestStats.Stats getPipelineStats(List pipeli } private static List oneTask(DeletePipelineRequest request) { - return List.of(new IngestService.DeletePipelineClusterStateUpdateTask(ActionListener.running(() -> fail("not called")), request)); + return List.of(new IngestService.DeletePipelineClusterStateUpdateTask(ActionTestUtils.assertNoFailureListener(t -> {}), request)); } private static ClusterState executeDelete(DeletePipelineRequest request, ClusterState clusterState) { @@ -2680,7 +2681,7 @@ private static void executeFailingDelete(DeletePipelineRequest request, ClusterS } private 
static List oneTask(PutPipelineRequest request) { - return List.of(new IngestService.PutPipelineClusterStateUpdateTask(ActionListener.running(() -> fail("not called")), request)); + return List.of(new IngestService.PutPipelineClusterStateUpdateTask(ActionTestUtils.assertNoFailureListener(t -> {}), request)); } private static ClusterState executePut(PutPipelineRequest request, ClusterState clusterState) { diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java index a31f25a09c3be..96b4fc90db2e2 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java @@ -29,6 +29,7 @@ public class PipelineFactoryTests extends ESTestCase { private final String versionString = version != null ? Integer.toString(version) : null; private final ScriptService scriptService = mock(ScriptService.class); private final Map metadata = randomMapOfMaps(); + private final Boolean deprecated = randomOptionalBoolean(); public void testCreate() throws Exception { Map processorConfig0 = new HashMap<>(); @@ -40,12 +41,14 @@ public void testCreate() throws Exception { if (metadata != null) { pipelineConfig.put(Pipeline.META_KEY, metadata); } + pipelineConfig.put(Pipeline.DEPRECATED_KEY, deprecated); pipelineConfig.put(Pipeline.PROCESSORS_KEY, List.of(Map.of("test", processorConfig0), Map.of("test", processorConfig1))); Map processorRegistry = Map.of("test", new TestProcessor.Factory()); Pipeline pipeline = Pipeline.create("_id", pipelineConfig, processorRegistry, scriptService); assertThat(pipeline.getId(), equalTo("_id")); assertThat(pipeline.getDescription(), equalTo("_description")); assertThat(pipeline.getVersion(), equalTo(version)); + assertThat(pipeline.getDeprecated(), equalTo(deprecated)); assertThat(pipeline.getProcessors().size(), equalTo(2)); 
assertThat(pipeline.getProcessors().get(0).getType(), equalTo("test-processor")); assertThat(pipeline.getProcessors().get(0).getTag(), equalTo("first-processor")); diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java index ab7da6c952450..cfbdbc3792082 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java @@ -157,7 +157,15 @@ public void testPipelineProcessorWithPipelineChain() throws Exception { LongSupplier relativeTimeProvider = mock(LongSupplier.class); when(relativeTimeProvider.getAsLong()).thenReturn(0L); - Pipeline pipeline1 = new Pipeline(pipeline1Id, null, null, null, new CompoundProcessor(pipeline1Processor), relativeTimeProvider); + Pipeline pipeline1 = new Pipeline( + pipeline1Id, + null, + null, + null, + new CompoundProcessor(pipeline1Processor), + relativeTimeProvider, + null + ); String key1 = randomAlphaOfLength(10); relativeTimeProvider = mock(LongSupplier.class); @@ -170,13 +178,14 @@ public void testPipelineProcessorWithPipelineChain() throws Exception { new CompoundProcessor(true, List.of(new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key1, randomInt()); }), pipeline2Processor), List.of()), - relativeTimeProvider + relativeTimeProvider, + null ); relativeTimeProvider = mock(LongSupplier.class); when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(2)); Pipeline pipeline3 = new Pipeline(pipeline3Id, null, null, null, new CompoundProcessor(new TestProcessor(ingestDocument -> { throw new RuntimeException("error"); - })), relativeTimeProvider); + })), relativeTimeProvider, null); when(ingestService.getPipeline(pipeline1Id)).thenReturn(pipeline1); when(ingestService.getPipeline(pipeline2Id)).thenReturn(pipeline2); when(ingestService.getPipeline(pipeline3Id)).thenReturn(pipeline3); 
diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java index d5bc5ad236b54..e30a67c166b5d 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java @@ -415,7 +415,7 @@ public void restoreShard( } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { return null; } diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java index 81684d749d57c..85088a6031b39 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java @@ -23,8 +23,7 @@ import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.mockito.Mockito.mock; -public class RestGetIndicesActionTests extends ESTestCase { - @SuppressWarnings("this-escape") +public final class RestGetIndicesActionTests extends ESTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); /** diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java index e3a68b971ba42..4830935cafbd3 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java @@ -28,8 +28,7 @@ import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.mockito.Mockito.mock; -public class RestPutIndexTemplateActionTests extends ESTestCase { - @SuppressWarnings("this-escape") +public final class RestPutIndexTemplateActionTests extends ESTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); private RestPutIndexTemplateAction action; diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java index 912cc13f49b3d..d7751709302af 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java @@ -20,9 +20,8 @@ import java.util.List; import java.util.Map; -public class RestDeleteActionTests extends RestActionTestCase { +public final class RestDeleteActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java index fc11557c2ec0d..dd27e33673005 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java @@ -23,8 +23,7 @@ import static org.hamcrest.Matchers.instanceOf; -public class RestGetActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestGetActionTests extends RestActionTestCase { final List 
contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java index db859a4a15ff2..c60c016acd709 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java @@ -37,12 +37,11 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -public class RestGetSourceActionTests extends RestActionTestCase { +public final class RestGetSourceActionTests extends RestActionTestCase { private static RestRequest request = new FakeRestRequest(); private static FakeRestChannel channel = new FakeRestChannel(request, true, 0); private static RestGetSourceResponseListener listener = new RestGetSourceResponseListener(channel, request); - @SuppressWarnings("this-escape") private final List compatibleMediaType = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java index 3b3a94a54595a..67c730e868192 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java @@ -37,9 +37,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -public class RestIndexActionTests extends RestActionTestCase { +public final class RestIndexActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); private 
final AtomicReference clusterStateSupplier = new AtomicReference<>(); diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java index 9e1d7e7a5306e..45cfc8510876e 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java @@ -28,10 +28,8 @@ import static org.hamcrest.Matchers.instanceOf; -public class RestMultiGetActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestMultiGetActionTests extends RestActionTestCase { XContentType VND_TYPE = randomVendorType(); - @SuppressWarnings("this-escape") List contentTypeHeader = Collections.singletonList(compatibleMediaType(VND_TYPE, RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java index 323861171723a..e8d21ecd10ee5 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java @@ -26,8 +26,7 @@ import java.util.List; import java.util.Map; -public class RestMultiTermVectorsActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestMultiTermVectorsActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java index 
7702bda695616..fc40ba365e61d 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java @@ -25,8 +25,7 @@ import java.util.List; import java.util.Map; -public class RestTermVectorsActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestTermVectorsActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java index e7c00ab8bb0de..a9fc1e92506f1 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java @@ -29,8 +29,7 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.mockito.Mockito.mock; -public class RestUpdateActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestUpdateActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); private RestUpdateAction action; diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java index d8a7a4a1dfe52..4498a8344eb98 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java @@ -25,9 +25,8 @@ import static org.hamcrest.Matchers.instanceOf; -public class RestCountActionTests extends RestActionTestCase { +public final 
class RestCountActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java index d9ae400a860ff..52f5396db1c87 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java @@ -21,8 +21,7 @@ import java.util.List; import java.util.Map; -public class RestExplainActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestExplainActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java index 74bd423cce967..6fadb71652163 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java @@ -25,8 +25,7 @@ import java.util.List; import java.util.Map; -public class RestMultiSearchActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestMultiSearchActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); private RestMultiSearchAction action; diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java 
b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index 3c9f5422c30fa..6d0480048982d 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -28,8 +28,7 @@ import java.util.List; import java.util.Map; -public class RestSearchActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestSearchActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); private RestSearchAction action; diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java index f52f8ecc1aff8..0bb170ed04430 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java @@ -76,19 +76,12 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected KnnVectorQueryBuilder doCreateTestQueryBuilder() { String fieldName = randomBoolean() ? VECTOR_FIELD : VECTOR_ALIAS_FIELD; - byte[] byteVector = new byte[VECTOR_DIMENSION]; float[] vector = new float[VECTOR_DIMENSION]; for (int i = 0; i < vector.length; i++) { - vector[i] = randomFloat(); - byteVector[i] = randomByte(); + vector[i] = elementType().equals(DenseVectorFieldMapper.ElementType.BYTE) ? randomByte() : randomFloat(); } int numCands = randomIntBetween(1, 1000); - - KnnVectorQueryBuilder queryBuilder = switch (elementType()) { - case BYTE -> new KnnVectorQueryBuilder(fieldName, byteVector, numCands, randomBoolean() ? 
null : randomFloat()); - case FLOAT -> new KnnVectorQueryBuilder(fieldName, vector, numCands, randomBoolean() ? null : randomFloat()); - }; - + KnnVectorQueryBuilder queryBuilder = new KnnVectorQueryBuilder(fieldName, vector, numCands, randomBoolean() ? null : randomFloat()); if (randomBoolean()) { List filters = new ArrayList<>(); int numFilters = randomIntBetween(1, 5); @@ -126,7 +119,12 @@ protected void doAssertLuceneQuery(KnnVectorQueryBuilder queryBuilder, Query que Query filterQuery = booleanQuery.clauses().isEmpty() ? null : booleanQuery; // The field should always be resolved to the concrete field Query knnVectorQueryBuilt = switch (elementType()) { - case BYTE -> new KnnByteVectorQuery(VECTOR_FIELD, queryBuilder.getByteQueryVector(), queryBuilder.numCands(), filterQuery); + case BYTE -> new KnnByteVectorQuery( + VECTOR_FIELD, + getByteQueryVector(queryBuilder.queryVector()), + queryBuilder.numCands(), + filterQuery + ); case FLOAT -> new KnnFloatVectorQuery(VECTOR_FIELD, queryBuilder.queryVector(), queryBuilder.numCands(), filterQuery); }; if (query instanceof VectorSimilarityQuery vectorSimilarityQuery) { @@ -168,7 +166,7 @@ public void testValidOutput() { { "knn" : { "field" : "vector", - "vector" : [ + "query_vector" : [ 1.0, 2.0, 3.0 @@ -195,26 +193,15 @@ public void testMustRewrite() throws IOException { } public void testBWCVersionSerializationFilters() throws IOException { - float[] bwcFloat = new float[VECTOR_DIMENSION]; KnnVectorQueryBuilder query = createTestQueryBuilder(); - if (query.queryVector() != null) { - bwcFloat = query.queryVector(); - } else { - for (int i = 0; i < query.getByteQueryVector().length; i++) { - bwcFloat[i] = query.getByteQueryVector()[i]; - } - } - - KnnVectorQueryBuilder queryNoFilters = new KnnVectorQueryBuilder(query.getFieldName(), bwcFloat, query.numCands(), null).queryName( - query.queryName() - ).boost(query.boost()); - + KnnVectorQueryBuilder queryNoFilters = new KnnVectorQueryBuilder(query.getFieldName(), 
query.queryVector(), query.numCands(), null) + .queryName(query.queryName()) + .boost(query.boost()); TransportVersion beforeFilterVersion = TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_8_0_0, TransportVersions.V_8_1_0 ); - assertBWCSerialization(query, queryNoFilters, beforeFilterVersion); } @@ -222,7 +209,6 @@ public void testBWCVersionSerializationSimilarity() throws IOException { KnnVectorQueryBuilder query = createTestQueryBuilder(); KnnVectorQueryBuilder queryNoSimilarity = new KnnVectorQueryBuilder( query.getFieldName(), - query.getByteQueryVector(), query.queryVector(), query.numCands(), null @@ -230,27 +216,21 @@ public void testBWCVersionSerializationSimilarity() throws IOException { assertBWCSerialization(query, queryNoSimilarity, TransportVersions.V_8_7_0); } - public void testBWCVersionSerializationByteQuery() throws IOException { - float[] bwcFloat = new float[VECTOR_DIMENSION]; + public void testBWCVersionSerializationQuery() throws IOException { KnnVectorQueryBuilder query = createTestQueryBuilder(); - if (query.queryVector() != null) { - bwcFloat = query.queryVector(); - } else { - for (int i = 0; i < query.getByteQueryVector().length; i++) { - bwcFloat[i] = query.getByteQueryVector()[i]; - } - } - KnnVectorQueryBuilder queryNoByteQuery = new KnnVectorQueryBuilder(query.getFieldName(), bwcFloat, query.numCands(), null) - .queryName(query.queryName()) - .boost(query.boost()) - .addFilterQueries(query.filterQueries()); - - TransportVersion beforeByteQueryVersion = TransportVersionUtils.randomVersionBetween( + TransportVersion differentQueryVersion = TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_8_2_0, - TransportVersions.V_8_6_0 + TransportVersions.KNN_AS_QUERY_ADDED ); - assertBWCSerialization(query, queryNoByteQuery, beforeByteQueryVersion); + Float similarity = differentQueryVersion.before(TransportVersions.V_8_8_0) ? 
null : query.getVectorSimilarity(); + KnnVectorQueryBuilder queryOlderVersion = new KnnVectorQueryBuilder( + query.getFieldName(), + query.queryVector(), + query.numCands(), + similarity + ).queryName(query.queryName()).boost(query.boost()).addFilterQueries(query.filterQueries()); + assertBWCSerialization(query, queryOlderVersion, differentQueryVersion); } private void assertBWCSerialization(QueryBuilder newQuery, QueryBuilder bwcQuery, TransportVersion version) throws IOException { @@ -267,18 +247,11 @@ private void assertBWCSerialization(QueryBuilder newQuery, QueryBuilder bwcQuery } } - @Override - public void testUnknownObjectException() { - assumeTrue("Test isn't relevant, since query is never parsed from xContent", false); - } - - @Override - public void testFromXContent() { - assumeTrue("Test isn't relevant, since query is never parsed from xContent", false); - } - - @Override - public void testUnknownField() { - assumeTrue("Test isn't relevant, since query is never parsed from xContent", false); + private static byte[] getByteQueryVector(float[] queryVector) { + byte[] byteQueryVector = new byte[queryVector.length]; + for (int i = 0; i < queryVector.length; i++) { + byteQueryVector[i] = (byte) queryVector[i]; + } + return byteQueryVector; } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java index 3ee2b56f5d698..a64283c8554b1 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java @@ -128,7 +128,7 @@ public void testSnapshotShardSizes() throws Exception { final CountDownLatch latch = new CountDownLatch(1); final Repository mockRepository = new FilterRepository(mock(Repository.class)) { @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId 
indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { assertThat(indexId.getName(), equalTo(indexName)); assertThat(shardId.id(), allOf(greaterThanOrEqualTo(0), lessThan(numberOfShards))); safeAwait(latch); @@ -192,7 +192,7 @@ public void testErroneousSnapshotShardSizes() throws Exception { final Map results = new ConcurrentHashMap<>(); final Repository mockRepository = new FilterRepository(mock(Repository.class)) { @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { final InternalSnapshotsInfoService.SnapshotShard snapshotShard = new InternalSnapshotsInfoService.SnapshotShard( new Snapshot("_repo", snapshotId), indexId, @@ -280,7 +280,7 @@ public void testNoLongerMaster() throws Exception { final Repository mockRepository = new FilterRepository(mock(Repository.class)) { @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { return IndexShardSnapshotStatus.newDone(0L, 0L, 0, 0, 0L, randomNonNegativeLong(), null); } }; @@ -316,7 +316,7 @@ public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, In public void testCleanUpSnapshotShardSizes() throws Exception { final Repository mockRepository = new FilterRepository(mock(Repository.class)) { @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { if (randomBoolean()) { throw new SnapshotException(new Snapshot("_repo", snapshotId), 
"simulated"); } else { diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index e95a7e3782272..f46517b848117 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.snapshots; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; @@ -1557,8 +1555,6 @@ public TestClusterNode currentMaster(ClusterState state) { private final class TestClusterNode { - private final Logger logger = LogManager.getLogger(TestClusterNode.class); - private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry( Stream.concat(ClusterModule.getNamedWriteables().stream(), NetworkModule.getNamedWriteables().stream()).toList() ); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java index 1b8f775bfb23f..eb95e82120570 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.snapshots; -import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -433,7 +433,7 @@ private static SnapshotsService.ShardSnapshotUpdate successUpdate(Snapshot snaps shardId, null, successfulShardStatus(nodeId), - ActionListener.running(() -> fail("should not complete publication")) + 
ActionTestUtils.assertNoFailureListener(t -> {}) ); } @@ -443,7 +443,7 @@ private static SnapshotsService.ShardSnapshotUpdate successUpdate(Snapshot snaps null, shardId, successfulShardStatus(nodeId), - ActionListener.running(() -> fail("should not complete publication")) + ActionTestUtils.assertNoFailureListener(t -> {}) ); } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 3d90a253e48c6..a5c17b32173a5 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -941,8 +941,8 @@ protected long transportDelayMillis(String actionName) { return 0; } - public class ClusterNode { - private final Logger logger = LogManager.getLogger(ClusterNode.class); + public final class ClusterNode { + private static final Logger logger = LogManager.getLogger(ClusterNode.class); private final int nodeIndex; Coordinator coordinator; @@ -962,7 +962,6 @@ public class ClusterNode { private ClearableRecycler clearableRecycler; private List blackholedRegisterOperations = new ArrayList<>(); - @SuppressWarnings("this-escape") ClusterNode(int nodeIndex, boolean masterEligible, Settings nodeSettings, NodeHealthService nodeHealthService) { this( nodeIndex, diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java index 3efe729fbe36a..026e1b7b975e5 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java @@ -37,7 +37,7 @@ import static 
org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertThat; -public class CoordinationStateTestCluster { +public final class CoordinationStateTestCluster { public static ClusterState clusterState( long term, @@ -181,7 +181,6 @@ void setInitialState(CoordinationMetadata.VotingConfiguration initialConfig, lon final CoordinationMetadata.VotingConfiguration initialConfiguration; final long initialValue; - @SuppressWarnings("this-escape") public CoordinationStateTestCluster(List nodes, ElectionStrategy electionStrategy) { this.electionStrategy = electionStrategy; messages = new ArrayList<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java b/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java index 784b82c713d3b..b9528e47745f1 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java @@ -16,8 +16,8 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasable; +import java.util.Collection; import java.util.function.Consumer; -import java.util.stream.StreamSupport; import static org.elasticsearch.test.ESTestCase.fail; import static org.junit.Assert.assertFalse; @@ -35,7 +35,7 @@ private ClusterStateTaskExecutorUtils() { public static ClusterState executeAndAssertSuccessful( ClusterState originalState, ClusterStateTaskExecutor executor, - Iterable tasks + Collection tasks ) throws Exception { return executeHandlingResults(originalState, executor, tasks, task -> {}, (task, e) -> fail(e)); } @@ -43,7 +43,7 @@ public static ClusterState executeAndAssert public static ClusterState executeAndThrowFirstFailure( ClusterState originalState, ClusterStateTaskExecutor executor, - Iterable tasks + Collection tasks ) throws Exception { return executeHandlingResults(originalState, 
executor, tasks, task -> {}, (task, e) -> { throw e; }); } @@ -51,7 +51,7 @@ public static ClusterState executeAndThrowF public static ClusterState executeIgnoringFailures( ClusterState originalState, ClusterStateTaskExecutor executor, - Iterable tasks + Collection tasks ) throws Exception { return executeHandlingResults(originalState, executor, tasks, task -> {}, (task, e) -> {}); } @@ -59,26 +59,30 @@ public static ClusterState executeIgnoringF public static ClusterState executeHandlingResults( ClusterState originalState, ClusterStateTaskExecutor executor, - Iterable tasks, + Collection tasks, CheckedConsumer onTaskSuccess, CheckedBiConsumer onTaskFailure ) throws Exception { - final var taskContexts = StreamSupport.stream(tasks.spliterator(), false).>map( - TestTaskContext::new - ).toList(); - final var resultingState = executor.execute( + final var taskContexts = tasks.stream().map(TestTaskContext::new).toList(); + ClusterState resultingState = executor.execute( new ClusterStateTaskExecutor.BatchExecutionContext<>(originalState, taskContexts, () -> null) ); assertNotNull(resultingState); - for (final var taskContext : taskContexts) { - final var testTaskContext = (TestTaskContext) taskContext; - assertFalse(taskContext + " should have completed", testTaskContext.incomplete()); + boolean allSuccess = true; + for (final var testTaskContext : taskContexts) { + assertFalse(testTaskContext + " should have completed", testTaskContext.incomplete()); if (testTaskContext.succeeded()) { onTaskSuccess.accept(testTaskContext.getTask()); } else { onTaskFailure.accept(testTaskContext.getTask(), testTaskContext.getFailure()); + allSuccess = false; } } + + if (allSuccess) { + taskContexts.forEach(TestTaskContext::onPublishSuccess); + } + return resultingState; } @@ -86,6 +90,7 @@ private static class TestTaskContext impleme private final T task; private Exception failure; private boolean succeeded; + private Runnable onPublishSuccess; TestTaskContext(T task) { this.task = 
task; @@ -109,6 +114,11 @@ Exception getFailure() { return failure; } + void onPublishSuccess() { + assert onPublishSuccess != null; + onPublishSuccess.run(); + } + @Override public void onFailure(Exception failure) { assert incomplete(); @@ -123,6 +133,7 @@ public void success(Runnable onPublishSuccess, ClusterStateAckListener clusterSt assert clusterStateAckListener != null; assert task == clusterStateAckListener || (task instanceof ClusterStateAckListener == false); this.succeeded = true; + this.onPublishSuccess = onPublishSuccess; } @Override @@ -131,6 +142,7 @@ public void success(Runnable onPublishSuccess) { assert onPublishSuccess != null; assert task instanceof ClusterStateAckListener == false; this.succeeded = true; + this.onPublishSuccess = onPublishSuccess; } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java index 12f5989d560aa..181b6c82379ed 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java @@ -134,7 +134,7 @@ public boolean isReadOnly() { public void snapshotShard(SnapshotShardContext context) {} @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { return null; } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java index ea94f342a953e..edadf15af5ca4 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; @@ -16,6 +15,7 @@ import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; public abstract class AbstractTermsTestCase extends ESIntegTestCase { @@ -33,34 +33,43 @@ private static long sumOfDocCounts(Terms terms) { public void testOtherDocCount(String... fieldNames) { for (String fieldName : fieldNames) { - SearchResponse allTerms = prepareSearch("idx").addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(fieldName) - .size(10000) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ).get(); - assertNoFailures(allTerms); + assertResponse( + prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(fieldName) + .size(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ), + allTerms -> { + assertNoFailures(allTerms); - Terms terms = allTerms.getAggregations().get("terms"); - assertEquals(0, terms.getSumOfOtherDocCounts()); // size is 0 - final long sumOfDocCounts = sumOfDocCounts(terms); - final int totalNumTerms = terms.getBuckets().size(); + Terms terms = allTerms.getAggregations().get("terms"); + assertEquals(0, terms.getSumOfOtherDocCounts()); // size is 0 + final long sumOfDocCounts = sumOfDocCounts(terms); + final int totalNumTerms = terms.getBuckets().size(); - for (int size = 1; size < totalNumTerms + 2; size += 
randomIntBetween(1, 5)) { - for (int shardSize = size; shardSize <= totalNumTerms + 2; shardSize += randomIntBetween(1, 5)) { - SearchResponse resp = prepareSearch("idx").addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(fieldName) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ).get(); - assertNoFailures(resp); - terms = resp.getAggregations().get("terms"); - assertEquals(Math.min(size, totalNumTerms), terms.getBuckets().size()); - assertEquals(sumOfDocCounts, sumOfDocCounts(terms)); + for (int size = 1; size < totalNumTerms + 2; size += randomIntBetween(1, 5)) { + for (int shardSize = size; shardSize <= totalNumTerms + 2; shardSize += randomIntBetween(1, 5)) { + final int finalSize = size; + assertResponse( + prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(fieldName) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ), + response -> { + assertNoFailures(response); + Terms innerTerms = response.getAggregations().get("terms"); + assertEquals(Math.min(finalSize, totalNumTerms), innerTerms.getBuckets().size()); + assertEquals(sumOfDocCounts, sumOfDocCounts(innerTerms)); + } + ); + } + } } - } + ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java index 1bdc39fdc8e5f..552a3596916a7 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java @@ -143,6 +143,7 @@ public void testMultiValuedDocs() throws IOException { testWithSeveralDocs(LuceneTestCase::rarely, null, 
randomPrecision()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101695") public void testBoundedMultiValuedDocs() throws IOException { int precision = randomPrecision(); testWithSeveralDocs(LuceneTestCase::rarely, randomBBox(precision), precision); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index 7b4e591051e61..2138c0f750ac2 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.geo.SpatialPoint; @@ -30,6 +29,7 @@ import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertCheckedResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -229,25 +229,28 @@ public void setupSuiteScopeCluster() throws Exception { // value for NUMBER_FIELD_NAME. This will check that after random indexing each document only has 1 value for // NUMBER_FIELD_NAME and it is the correct value. Following this initial change its seems that this call was getting // more that 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type. 
- SearchResponse response = prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME) - .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC)) - .setSize(5000) - .get(); - assertNoFailures(response); - long totalHits = response.getHits().getTotalHits().value; - XContentBuilder builder = XContentFactory.jsonBuilder(); - ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); - logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); - for (int i = 0; i < totalHits; i++) { - SearchHit searchHit = response.getHits().getAt(i); - assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx")); - DocumentField hitField = searchHit.field(NUMBER_FIELD_NAME); - - assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1)); - Long value = hitField.getValue(); - assertThat("Hit " + i + " has wrong value", value.intValue(), equalTo(i)); - } - assertThat(totalHits, equalTo(2000L)); + assertCheckedResponse( + prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME) + .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC)) + .setSize(5000), + response -> { + assertNoFailures(response); + long totalHits = response.getHits().getTotalHits().value; + XContentBuilder builder = XContentFactory.jsonBuilder(); + ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); + logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); + for (int i = 0; i < totalHits; i++) { + SearchHit searchHit = response.getHits().getAt(i); + assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx")); + DocumentField hitField = searchHit.field(NUMBER_FIELD_NAME); + + assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1)); + Long value = hitField.getValue(); + 
assertThat("Hit " + i + " has wrong value", value.intValue(), equalTo(i)); + } + assertThat(totalHits, equalTo(2000L)); + } + ); } private SpatialPoint computeCentroid(SpatialPoint[] points) { diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 6abce04556316..ed6f0e1c87f2a 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.geo.GeometryNormalizer; @@ -47,7 +46,8 @@ import java.util.Map; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -111,22 +111,24 @@ public void testIndexPointsFilterRectangle() throws Exception { .get(); Geometry geometry = new Rectangle(-45, 45, 45, -45); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertNoFailures(searchResponse); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); // default query, without specifying relation (expect intersects) - searchResponse = client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry)).get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); } public void testIndexPointsCircle() throws Exception { @@ -177,14 +179,15 @@ public void testIndexPointsPolygon() throws Exception { Polygon polygon = new Polygon(new LinearRing(new double[] { -35, -35, -25, -25, -35 }, new double[] { -35, -25, -25, -35, -35 })); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertNoFailures(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - 
assertThat(searchHits.getTotalHits().value, equalTo(1L)); - assertThat(searchHits.getAt(0).getId(), equalTo("1")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), + response -> { + SearchHits searchHits = response.getHits(); + assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getAt(0).getId(), equalTo("1")); + } + ); } public void testIndexPointsMultiPolygon() throws Exception { @@ -218,47 +221,44 @@ public void testIndexPointsMultiPolygon() throws Exception { ); MultiPolygon multiPolygon = new MultiPolygon(List.of(encloseDocument1Cb, encloseDocument2Cb)); - { - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - assertThat(searchResponse.getHits().getAt(0).getId(), not(equalTo("2"))); - assertThat(searchResponse.getHits().getAt(1).getId(), not(equalTo("2"))); - } - { - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.WITHIN)) - .get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - assertThat(searchResponse.getHits().getAt(0).getId(), not(equalTo("2"))); - assertThat(searchResponse.getHits().getAt(1).getId(), not(equalTo("2"))); - } - { - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.DISJOINT)) - .get(); - 
assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); - } - { - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.CONTAINS)) - .get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(0)); - } + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.INTERSECTS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); + assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); + } + ); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.WITHIN)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); + assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); + } + ); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.DISJOINT)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + } + ); + 
assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.CONTAINS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getHits().length, equalTo(0)); + } + ); } public void testIndexPointsRectangle() throws Exception { @@ -279,14 +279,15 @@ public void testIndexPointsRectangle() throws Exception { Rectangle rectangle = new Rectangle(-50, -40, -45, -55); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + } + ); } public void testIndexPointsIndexedRectangle() throws Exception { @@ -332,30 +333,31 @@ public void testIndexPointsIndexedRectangle() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery( - queryBuilder().shapeQuery(defaultFieldName, "shape1") - .relation(ShapeRelation.INTERSECTS) - .indexedShapeIndex(indexedShapeIndex) - .indexedShapePath(indexedShapePath) - ) - .get(); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery( + queryBuilder().shapeQuery(defaultFieldName, "shape1") + 
.relation(ShapeRelation.INTERSECTS) + .indexedShapeIndex(indexedShapeIndex) + .indexedShapePath(indexedShapePath) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("point2")); + } + ); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("point2")); - - searchResponse = client().prepareSearch(defaultIndexName) - .setQuery( - queryBuilder().shapeQuery(defaultFieldName, "shape2") - .relation(ShapeRelation.INTERSECTS) - .indexedShapeIndex(indexedShapeIndex) - .indexedShapePath(indexedShapePath) - ) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery( + queryBuilder().shapeQuery(defaultFieldName, "shape2") + .relation(ShapeRelation.INTERSECTS) + .indexedShapeIndex(indexedShapeIndex) + .indexedShapePath(indexedShapePath) + ), + 0L + ); } public void testWithInQueryLine() throws Exception { @@ -421,34 +423,29 @@ public void testQueryPoint() throws Exception { .get(); Point point = new Point(-35, -25); - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = 
client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(0, searchHits.getTotalHits().value); - } + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, point)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)), + 0L + ); } public void testQueryMultiPoint() throws Exception { @@ -463,34 +460,28 @@ public void testQueryMultiPoint() throws Exception { MultiPoint multiPoint = new MultiPoint(List.of(new Point(-35, -25), new Point(-15, -5))); - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.WITHIN)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - 
SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.CONTAINS)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(0, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.DISJOINT)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(0, searchHits.getTotalHits().value); - } + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.WITHIN)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.CONTAINS)), + 0L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.DISJOINT)), + 0L + ); } public void testQueryPointFromGeoJSON() throws Exception { @@ -507,34 +498,30 @@ public void testQueryPointFromGeoJSON() throws Exception { client().index(new IndexRequest(defaultIndexName).id("1").source(doc1, XContentType.JSON).setRefreshPolicy(IMMEDIATE)).actionGet(); Point point = new Point(-35, -25); - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, 
point).relation(ShapeRelation.WITHIN)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(0, searchHits.getTotalHits().value); - } + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, point)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)), + 0L + ); + } /** @@ -570,34 +557,34 @@ public void testQueryPointFromMultiPoint() throws Exception { for (Point point : new Point[] { pointA, pointB, pointC, pointD, pointInvalid }) { int expectedDocs = point.equals(pointInvalid) ? 0 : 1; int disjointDocs = point.equals(pointInvalid) ? 
1 : 0; - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals("Doc matches %s" + point, expectedDocs, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals("Doc WITHIN %s" + point, 0, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals("Doc CONTAINS %s" + point, expectedDocs, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals("Doc DISJOINT with %s" + point, disjointDocs, searchHits.getTotalHits().value); - } + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point)), + expectedDocs + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)), + 0L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)), + expectedDocs + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + 
.setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)), + disjointDocs + ); } } @@ -617,13 +604,12 @@ public void testIndexPointsFromLine() throws Exception { } client().admin().indices().prepareRefresh(defaultIndexName).get(); // all points from a line intersect with the line - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setTrackTotalHits(true) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, line).relation(ShapeRelation.INTERSECTS)) - .get(); - assertNoFailures(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo((long) line.length())); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, line).relation(ShapeRelation.INTERSECTS)), + line.length() + ); } public void testIndexPointsFromPolygon() throws Exception { @@ -643,13 +629,12 @@ public void testIndexPointsFromPolygon() throws Exception { } client().admin().indices().prepareRefresh(defaultIndexName).get(); // all points from a polygon intersect with the polygon - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setTrackTotalHits(true) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)) - .get(); - assertNoFailures(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo((long) linearRing.length())); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), + linearRing.length() + ); } /** Only LegacyGeoShape has limited support, so other tests will ignore nothing */ diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java index fc8510330fd25..751a4d835610d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.geo; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.geometry.LinearRing; import org.elasticsearch.geometry.MultiPolygon; @@ -21,6 +20,7 @@ import java.util.List; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; @@ -65,11 +65,12 @@ public void testRectangleSpanningDateline(BasePointShapeQueryTestCase { + SearchHits searchHits = response.getHits(); + assertEquals(2, searchHits.getTotalHits().value); + assertNotEquals("1", searchHits.getAt(0).getId()); + assertNotEquals("1", searchHits.getAt(1).getId()); + }); } public void testPolygonSpanningDateline(BasePointShapeQueryTestCase tests) throws Exception { @@ -108,13 +109,14 @@ public void testPolygonSpanningDateline(BasePointShapeQueryTestCase { + SearchHits searchHits = response.getHits(); + assertEquals(2, searchHits.getTotalHits().value); + assertNotEquals("1", searchHits.getAt(0).getId()); + assertNotEquals("4", searchHits.getAt(0).getId()); + assertNotEquals("1", searchHits.getAt(1).getId()); + assertNotEquals("4", searchHits.getAt(1).getId()); + }); } public void testMultiPolygonSpanningDateline(BasePointShapeQueryTestCase tests) throws Exception { @@ -150,10 +152,11 @@ public void testMultiPolygonSpanningDateline(BasePointShapeQueryTestCase { + SearchHits searchHits 
= response.getHits(); + assertEquals(2, searchHits.getTotalHits().value); + assertNotEquals("3", searchHits.getAt(0).getId()); + assertNotEquals("3", searchHits.getAt(1).getId()); + }); } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java index 29307f7f63ce9..8397dece4f537 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.geo; import org.apache.lucene.util.SloppyMath; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.query.GeoShapeQueryBuilder; @@ -22,6 +21,7 @@ import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -57,17 +57,11 @@ public void testIndexPolygonDateLine() throws Exception { indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); - SearchResponse searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-179.75, 1))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-179.75, 1))), 1L); + assertHitCount(prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(90, 1))), 0L); + 
assertHitCount(prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-180, 1))), 1L); + assertHitCount(prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(180, 1))), 1L); - searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(90, 1))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - - searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-180, 1))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(180, 1))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } /** The testBulk method uses this only for Geo-specific tests */ @@ -77,23 +71,24 @@ protected void doDistanceAndBoundingBoxTest(String key) { 53 ); - SearchResponse distance = prepareSearch().addStoredField("pin") - .setQuery(geoDistanceQuery("pin").distance("425km").point(51.11, 9.851)) - .get(); - - assertHitCount(distance, 5); - GeoPoint point = new GeoPoint(); - for (SearchHit hit : distance.getHits()) { - String name = hit.getId(); - point.resetFromString(hit.getFields().get("pin").getValue()); - double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851); - - assertThat("distance to '" + name + "'", dist, lessThanOrEqualTo(425000d)); - assertThat(name, anyOf(equalTo("CZ"), equalTo("DE"), equalTo("BE"), equalTo("NL"), equalTo("LU"))); - if (key.equals(name)) { - assertThat(dist, closeTo(0d, 0.1d)); + assertResponse( + prepareSearch().addStoredField("pin").setQuery(geoDistanceQuery("pin").distance("425km").point(51.11, 9.851)), + response -> { + assertHitCount(response, 5L); + GeoPoint point = new GeoPoint(); + for (SearchHit hit : response.getHits()) { + String name = hit.getId(); + point.resetFromString(hit.getFields().get("pin").getValue()); + double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851); + + assertThat("distance to 
'" + name + "'", dist, lessThanOrEqualTo(425000d)); + assertThat(name, anyOf(equalTo("CZ"), equalTo("DE"), equalTo("BE"), equalTo("NL"), equalTo("LU"))); + if (key.equals(name)) { + assertThat(dist, closeTo(0d, 0.1d)); + } + } } - } + ); } private static double distance(double lat1, double lon1, double lat2, double lon2) { diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java index b8f5cdf00da34..5bd3a3ba69f2f 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java @@ -10,7 +10,6 @@ import org.apache.lucene.tests.geo.GeoTestUtil; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.geo.GeometryNormalizer; @@ -37,6 +36,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; public abstract class GeoShapeQueryTestCase extends BaseShapeQueryTestCase { @@ -141,11 +141,11 @@ public void testEnvelopeSpanningDateline() throws Exception { } } ); - - SearchResponse response = client().prepareSearch(defaultIndexName).setQuery(querySupplier.get()).get(); - assertEquals(2, response.getHits().getTotalHits().value); - assertNotEquals("1", response.getHits().getAt(0).getId()); - assertNotEquals("1", response.getHits().getAt(1).getId()); + assertResponse(client().prepareSearch(defaultIndexName).setQuery(querySupplier.get()), response -> { + assertEquals(2, 
response.getHits().getTotalHits().value); + assertNotEquals("1", response.getHits().getAt(0).getId()); + assertNotEquals("1", response.getHits().getAt(1).getId()); + }); } public void testIndexRectangleSpanningDateLine() throws Exception { diff --git a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java index d23b79ed0cde2..00b16625d17ad 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java @@ -42,9 +42,9 @@ import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; -public class BackgroundIndexer implements AutoCloseable { +public final class BackgroundIndexer implements AutoCloseable { - private final Logger logger = LogManager.getLogger(getClass()); + private static final Logger logger = LogManager.getLogger(BackgroundIndexer.class); final Thread[] writers; final Client client; @@ -98,7 +98,6 @@ public BackgroundIndexer(String index, Client client, int numOfDocs, final int w * @param autoStart set to true to start indexing as soon as all threads have been created. 
* @param random random instance to use */ - @SuppressWarnings("this-escape") public BackgroundIndexer( final String index, final Client client, diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 3c458a43f91b3..d3f01f03ed61c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -45,6 +45,7 @@ import org.elasticsearch.client.internal.Requests; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.CompositeBytesReference; @@ -811,6 +812,10 @@ public static boolean randomBoolean() { return random().nextBoolean(); } + public static Boolean randomOptionalBoolean() { + return randomBoolean() ? 
Boolean.TRUE : randomFrom(Boolean.FALSE, null); + } + public static byte randomByte() { return (byte) random().nextInt(); } @@ -1003,6 +1008,10 @@ public static String randomIdentifier() { return randomAlphaOfLengthBetween(8, 12).toLowerCase(Locale.ROOT); } + public static String randomUUID() { + return UUIDs.randomBase64UUID(random()); + } + public static String randomUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) { return RandomizedTest.randomUnicodeOfLengthBetween(minCodeUnits, maxCodeUnits); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index bd0f2c88c2219..8abf10a773764 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -174,7 +174,7 @@ */ public final class InternalTestCluster extends TestCluster { - private final Logger logger = LogManager.getLogger(getClass()); + private static final Logger logger = LogManager.getLogger(InternalTestCluster.class); private static final Predicate DATA_NODE_PREDICATE = nodeAndClient -> DiscoveryNode.canContainData( nodeAndClient.node.settings() diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestClustersThreadFilter.java b/test/framework/src/main/java/org/elasticsearch/test/TestClustersThreadFilter.java new file mode 100644 index 0000000000000..c2dbeecb09bfb --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/TestClustersThreadFilter.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test; + +import com.carrotsearch.randomizedtesting.ThreadFilter; + +/** + * When using shared test clusters we launch processes that persist across test suites. This filter is used to ignore those in that case. + */ +public class TestClustersThreadFilter implements ThreadFilter { + @Override + public boolean reject(Thread t) { + return t.getName().endsWith("-log-forwarder") || t.getName().contains("node-executor"); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index e0b268c4b2743..e35e79fd67c21 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -37,6 +37,8 @@ */ public final class MockEngineSupport { + private static final Logger logger = LogManager.getLogger(Engine.class); + /** * Allows tests to wrap an index reader randomly with a given ratio. This * is disabled by default ie. {@code 0.0d} since reader wrapping is insanely @@ -58,7 +60,6 @@ public final class MockEngineSupport { ); private final AtomicBoolean closing = new AtomicBoolean(false); - private final Logger logger = LogManager.getLogger(Engine.class); private final ShardId shardId; private final InFlightSearchers inFlightSearchers; private final MockContext mockContext; @@ -178,7 +179,7 @@ public Engine.Searcher wrapSearcher(Engine.Searcher searcher) { * early. 
- good news, stuff will fail all over the place if we don't * get this right here */ - SearcherCloseable closeable = new SearcherCloseable(searcher, logger, inFlightSearchers); + SearcherCloseable closeable = new SearcherCloseable(searcher, inFlightSearchers); return new Engine.Searcher( searcher.source(), reader, @@ -222,12 +223,10 @@ private static final class SearcherCloseable implements Closeable { private RuntimeException firstReleaseStack; private final Object lock = new Object(); private final int initialRefCount; - private final Logger logger; private final AtomicBoolean closed = new AtomicBoolean(false); - SearcherCloseable(final Engine.Searcher searcher, Logger logger, InFlightSearchers inFlightSearchers) { + SearcherCloseable(final Engine.Searcher searcher, InFlightSearchers inFlightSearchers) { this.searcher = searcher; - this.logger = logger; initialRefCount = searcher.getIndexReader().getRefCount(); this.inFlightSearchers = inFlightSearchers; assert initialRefCount > 0 diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 26d55885fe7a3..1e1e1f084016a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -209,19 +209,7 @@ public void initClient() throws IOException { assert clusterHosts == null; assert availableFeatures == null; assert nodeVersions == null; - String cluster = getTestRestCluster(); - String[] stringUrls = cluster.split(","); - List hosts = new ArrayList<>(stringUrls.length); - for (String stringUrl : stringUrls) { - int portSeparator = stringUrl.lastIndexOf(':'); - if (portSeparator < 0) { - throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]"); - } - String host = stringUrl.substring(0, portSeparator); - int port = Integer.valueOf(stringUrl.substring(portSeparator + 1)); - 
hosts.add(buildHttpHost(host, port)); - } - clusterHosts = unmodifiableList(hosts); + clusterHosts = parseClusterHosts(getTestRestCluster()); logger.info("initializing REST clients against {}", clusterHosts); client = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); adminClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); @@ -281,6 +269,21 @@ protected static boolean has(ProductFeature feature) { return availableFeatures.contains(feature); } + protected List parseClusterHosts(String hostsString) { + String[] stringUrls = hostsString.split(","); + List hosts = new ArrayList<>(stringUrls.length); + for (String stringUrl : stringUrls) { + int portSeparator = stringUrl.lastIndexOf(':'); + if (portSeparator < 0) { + throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]"); + } + String host = stringUrl.substring(0, portSeparator); + int port = Integer.valueOf(stringUrl.substring(portSeparator + 1)); + hosts.add(buildHttpHost(host, port)); + } + return unmodifiableList(hosts); + } + protected String getTestRestCluster() { String cluster = System.getProperty("tests.rest.cluster"); if (cluster == null) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java index c20e1ce70e601..1229b3470775f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java @@ -74,11 +74,10 @@ protected void dispatchRequest(RestRequest request) { * By default, will throw {@link AssertionError} when any execution method is called, unless configured otherwise using * {@link #setExecuteVerifier} or {@link #setExecuteLocallyVerifier}. 
*/ - public static class VerifyingClient extends NoOpNodeClient { + public static final class VerifyingClient extends NoOpNodeClient { AtomicReference, ActionRequest, ActionResponse>> executeVerifier = new AtomicReference<>(); AtomicReference, ActionRequest, ActionResponse>> executeLocallyVerifier = new AtomicReference<>(); - @SuppressWarnings("this-escape") public VerifyingClient(String testName) { super(testName); reset(); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java index ddbea2c562db8..05d6eca0d021d 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/DisruptableMockTransport.java @@ -38,7 +38,7 @@ public abstract class DisruptableMockTransport extends MockTransport { private final DiscoveryNode localNode; - private final Logger logger = LogManager.getLogger(DisruptableMockTransport.class); + private static final Logger logger = LogManager.getLogger(DisruptableMockTransport.class); private final DeterministicTaskQueue deterministicTaskQueue; private final List blackholedRequests = new ArrayList<>(); private final Set blockedActions = new HashSet<>(); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterSpecBuilder.java index d06be2463cc5b..78dbb8fb1f591 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterSpecBuilder.java @@ -25,6 +25,7 @@ public abstract class AbstractLocalClusterSpecBuilder> implements LocalClusterSpecBuilder { private String name = "test-cluster"; + private boolean shared = 
false; private final List nodeBuilders = new ArrayList<>(); private final List users = new ArrayList<>(); private final List roleFiles = new ArrayList<>(); @@ -107,12 +108,34 @@ public AbstractLocalClusterSpecBuilder rolesFile(Resource rolesFile) { return this; } + @Override + public LocalClusterSpecBuilder shared(Boolean isShared) { + if (Integer.parseInt(System.getProperty("tests.max.parallel.forks")) > 1) { + String taskPath = System.getProperty("tests.task"); + String project = taskPath.substring(0, taskPath.lastIndexOf(':')); + String taskName = taskPath.substring(taskPath.lastIndexOf(':') + 1); + + throw new IllegalStateException( + "Parallel test execution is not supported for shared clusters. Configure the build script for project '" + + project + + "':\n\n" + + "tasks.named('" + + taskName + + "') {\n" + + " maxParallelForks = 1\n" + + "}" + ); + } + this.shared = isShared; + return this; + } + protected LocalClusterSpec buildClusterSpec() { // Apply lazily provided configuration lazyConfigProviders.forEach(s -> s.get().apply(this)); List clusterUsers = users.isEmpty() ? 
List.of(User.DEFAULT_USER) : users; - LocalClusterSpec clusterSpec = new LocalClusterSpec(name, clusterUsers, roleFiles); + LocalClusterSpec clusterSpec = new LocalClusterSpec(name, clusterUsers, roleFiles, shared); List nodeSpecs; if (nodeBuilders.isEmpty()) { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java index d2a9564d05c10..4b20afcf1e8b4 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java @@ -14,9 +14,8 @@ import org.elasticsearch.test.cluster.local.distribution.SnapshotDistributionResolver; import org.elasticsearch.test.cluster.util.resource.Resource; -public class DefaultLocalClusterSpecBuilder extends AbstractLocalClusterSpecBuilder { +public final class DefaultLocalClusterSpecBuilder extends AbstractLocalClusterSpecBuilder { - @SuppressWarnings("this-escape") public DefaultLocalClusterSpecBuilder() { super(); this.apply(c -> c.systemProperty("ingest.geoip.downloader.enabled.default", "false")); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java index ea15f770b4e6c..38d83e7652e98 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java @@ -15,6 +15,9 @@ import org.junit.runners.model.Statement; import java.io.InputStream; +import java.lang.annotation.Annotation; +import java.lang.reflect.InvocationTargetException; +import java.util.Arrays; import 
java.util.function.Supplier; public class DefaultLocalElasticsearchCluster implements ElasticsearchCluster { @@ -32,13 +35,20 @@ public Statement apply(Statement base, Description description) { return new Statement() { @Override public void evaluate() throws Throwable { + S spec = specProvider.get(); try { - S spec = specProvider.get(); - handle = clusterFactory.create(spec); - handle.start(); + if (spec.isShared() == false || handle == null) { + if (spec.isShared()) { + maybeCheckThreadLeakFilters(description); + } + handle = clusterFactory.create(spec); + handle.start(); + } base.evaluate(); } finally { - close(); + if (spec.isShared() == false) { + close(); + } } } }; @@ -155,4 +165,32 @@ protected void checkHandle() { throw new IllegalStateException("Cluster handle has not been initialized. Did you forget the @ClassRule annotation?"); } } + + /** + * Check for {@code TestClustersThreadFilter} if necessary. We use reflection here to avoid a dependency on randomized runner. + */ + @SuppressWarnings("unchecked") + private void maybeCheckThreadLeakFilters(Description description) { + try { + Class threadLeakFiltersClass = (Class) Class.forName( + "com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters" + ); + Annotation[] annotations = description.getTestClass().getAnnotationsByType(threadLeakFiltersClass); + for (Annotation annotation : annotations) { + try { + Class[] classes = (Class[]) annotation.getClass().getMethod("filters").invoke(annotation); + if (Arrays.stream(classes).noneMatch(c -> c.getName().equals("org.elasticsearch.test.TestClustersThreadFilter"))) { + throw new IllegalStateException( + "TestClustersThreadFilter is required when using shared clusters. 
Annotate your test with the following:\n\n" + + " @ThreadLeakFilters(filters = TestClustersThreadFilter.class)\n" + ); + } + } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException("Unable to inspect filters on " + annotation, e); + } + } + } catch (ClassNotFoundException e) { + // If randomized runner isn't on the classpath then we don't care + } + } } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java index 4c3608a181911..e87f370e2b592 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java @@ -27,12 +27,14 @@ public class LocalClusterSpec implements ClusterSpec { private final String name; private final List users; private final List roleFiles; + private final boolean shared; private List nodes; - public LocalClusterSpec(String name, List users, List roleFiles) { + public LocalClusterSpec(String name, List users, List roleFiles, boolean shared) { this.name = name; this.users = users; this.roleFiles = roleFiles; + this.shared = shared; } public String getName() { @@ -51,6 +53,10 @@ public List getNodes() { return nodes; } + public boolean isShared() { + return shared; + } + public void setNodes(List nodes) { this.nodes = nodes; } @@ -281,7 +287,7 @@ public Map resolveEnvironment() { * @return a new local node spec */ private LocalNodeSpec getFilteredSpec(SettingsProvider filteredProvider, SettingsProvider filteredKeystoreProvider) { - LocalClusterSpec newCluster = new LocalClusterSpec(cluster.name, cluster.users, cluster.roleFiles); + LocalClusterSpec newCluster = new LocalClusterSpec(cluster.name, cluster.users, cluster.roleFiles, cluster.shared); List nodeSpecs = cluster.nodes.stream() .map( diff --git 
a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpecBuilder.java index cca1a607d0bf7..5112f7dd97d79 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpecBuilder.java @@ -67,5 +67,13 @@ public interface LocalClusterSpecBuilder extends */ LocalClusterSpecBuilder rolesFile(Resource rolesFile); + /** + * Configure whether this cluster should be shared across test suites (classes). If set to {@code true} then the cluster will not be + * shut down or recreated before the next test suite begins execution. This setting is {@code false} by default. + * + * @param isShared whether the cluster should be shared + */ + LocalClusterSpecBuilder shared(Boolean isShared); + T build(); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/ProcessUtils.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/ProcessUtils.java index bcbf6cd9bbc37..c929f20195611 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/ProcessUtils.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/ProcessUtils.java @@ -167,6 +167,6 @@ private static void startLoggingThread(InputStream is, Consumer logAppen } catch (IOException e) { throw new UncheckedIOException("Error reading output from process.", e); } - }, name).start(); + }, name + "-log-forwarder").start(); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java index 88e039b6013e4..04d525112aed6 100644 --- 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java @@ -25,11 +25,10 @@ import java.io.IOException; import java.util.Map; -public class HistoBackedHistogramAggregator extends AbstractHistogramAggregator { +public final class HistoBackedHistogramAggregator extends AbstractHistogramAggregator { private final HistogramValuesSource.Histogram valuesSource; - @SuppressWarnings("this-escape") public HistoBackedHistogramAggregator( String name, AggregatorFactories factories, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java index 2b80084e2d92c..8be41867e9bb8 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java @@ -32,7 +32,7 @@ * Average aggregator operating over histogram datatypes {@link HistogramValuesSource} * The aggregation computes weighted average by taking counts into consideration for each value */ -public class HistoBackedAvgAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedAvgAggregator extends NumericMetricsAggregator.SingleValue { private final HistogramValuesSource.Histogram valuesSource; @@ -41,7 +41,6 @@ public class HistoBackedAvgAggregator extends NumericMetricsAggregator.SingleVal DoubleArray compensations; DocValueFormat format; - @SuppressWarnings("this-escape") public HistoBackedAvgAggregator( String name, ValuesSourceConfig valuesSourceConfig, diff --git 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java index f094c22e4dff8..c153225e4577a 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java @@ -26,13 +26,12 @@ import java.io.IOException; import java.util.Map; -public class HistoBackedMaxAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedMaxAggregator extends NumericMetricsAggregator.SingleValue { private final HistogramValuesSource.Histogram valuesSource; final DocValueFormat formatter; DoubleArray maxes; - @SuppressWarnings("this-escape") public HistoBackedMaxAggregator( String name, ValuesSourceConfig config, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java index ecf89f8eab15f..5efd279ea6eeb 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java @@ -26,13 +26,12 @@ import java.io.IOException; import java.util.Map; -public class HistoBackedMinAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedMinAggregator extends NumericMetricsAggregator.SingleValue { private final HistogramValuesSource.Histogram valuesSource; final DocValueFormat format; DoubleArray mins; - @SuppressWarnings("this-escape") public 
HistoBackedMinAggregator( String name, ValuesSourceConfig config, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java index ebf1a43e38e2a..55428a7f97232 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java @@ -33,7 +33,7 @@ * The aggregator sums each histogram value multiplied by its count. * Eg for a histogram of response times, this is an approximate "total time spent". */ -public class HistoBackedSumAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedSumAggregator extends NumericMetricsAggregator.SingleValue { private final HistogramValuesSource.Histogram valuesSource; private final DocValueFormat format; @@ -41,7 +41,6 @@ public class HistoBackedSumAggregator extends NumericMetricsAggregator.SingleVal private DoubleArray sums; private DoubleArray compensations; - @SuppressWarnings("this-escape") public HistoBackedSumAggregator( String name, ValuesSourceConfig valuesSourceConfig, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java index 184fd7072fa65..b3f5cd88730ae 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java @@ -29,14 +29,13 @@ * The aggregation counts the number 
of values a histogram field has within the aggregation context * by adding the counts of the histograms. */ -public class HistoBackedValueCountAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedValueCountAggregator extends NumericMetricsAggregator.SingleValue { final HistogramValuesSource.Histogram valuesSource; /** Count per bucket */ LongArray counts; - @SuppressWarnings("this-escape") public HistoBackedValueCountAggregator( String name, ValuesSourceConfig valuesSourceConfig, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregator.java index 2c2f213cf430b..5eefa7cfc56fa 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregator.java @@ -28,15 +28,15 @@ import java.io.IOException; import java.util.Map; -public class TimeSeriesRateAggregator extends NumericMetricsAggregator.SingleValue { +public final class TimeSeriesRateAggregator extends NumericMetricsAggregator.SingleValue { - protected final ValuesSource.Numeric valuesSource; + private final ValuesSource.Numeric valuesSource; - protected DoubleArray startValues; - protected DoubleArray endValues; - protected LongArray startTimes; - protected LongArray endTimes; - protected DoubleArray resetCompensations; + private DoubleArray startValues; + private DoubleArray endValues; + private LongArray startTimes; + private LongArray endTimes; + private DoubleArray resetCompensations; private long currentBucket = -1; private long currentEndTime = -1; @@ -49,8 +49,7 @@ public class TimeSeriesRateAggregator extends NumericMetricsAggregator.SingleVal private final Rounding.DateTimeUnit rateUnit; // Unused parameters are so that the constructor implements 
`RateAggregatorSupplier` - @SuppressWarnings("this-escape") - protected TimeSeriesRateAggregator( + TimeSeriesRateAggregator( String name, ValuesSourceConfig valuesSourceConfig, Rounding.DateTimeUnit rateUnit, diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 374897b30f038..2538ee613b96f 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -845,16 +845,22 @@ private int readMultiRegions( // nothing to read, skip continue; } - final CacheFileRegion fileRegion = get(cacheKey, length, region); - final long regionStart = getRegionStart(region); - fileRegion.populateAndRead( - mapSubRangeToRegion(rangeToWrite, region), - subRangeToRead, - readerWithOffset(reader, fileRegion, Math.toIntExact(rangeToRead.start() - regionStart)), - writerWithOffset(writer, fileRegion, Math.toIntExact(rangeToWrite.start() - regionStart)), - ioExecutor, - listeners.acquire(i -> bytesRead.updateAndGet(j -> Math.addExact(i, j))) - ); + ActionListener listener = listeners.acquire(i -> bytesRead.updateAndGet(j -> Math.addExact(i, j))); + try { + final CacheFileRegion fileRegion = get(cacheKey, length, region); + final long regionStart = getRegionStart(region); + fileRegion.populateAndRead( + mapSubRangeToRegion(rangeToWrite, region), + subRangeToRead, + readerWithOffset(reader, fileRegion, Math.toIntExact(rangeToRead.start() - regionStart)), + writerWithOffset(writer, fileRegion, Math.toIntExact(rangeToWrite.start() - regionStart)), + ioExecutor, + listener + ); + } catch (Exception e) { + assert e instanceof AlreadyClosedException : e; + listener.onFailure(e); + } } } readsComplete.get(); diff --git 
a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index b68a115850fe8..8b8af3dae2fef 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -531,7 +531,7 @@ public void testCcrRepositoryFetchesSnapshotShardSizeFromIndexShardStoreStats() new SnapshotId(CcrRepository.LATEST, CcrRepository.LATEST), new IndexId(indexStats.getIndex(), indexStats.getUuid()), new ShardId(new Index(indexStats.getIndex(), indexStats.getUuid()), shardId) - ).asCopy(); + ); assertThat(indexShardSnapshotStatus, notNullValue()); assertThat(indexShardSnapshotStatus.getStage(), is(IndexShardSnapshotStatus.Stage.DONE)); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java index 30c20f419ebba..7a05a4e712fc4 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java @@ -33,7 +33,7 @@ /** * A {@link ClusterStateListener} that completes any {@link ShardFollowTask} which concerns a deleted index. 
*/ -public class ShardFollowTaskCleaner implements ClusterStateListener { +public final class ShardFollowTaskCleaner implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(ShardFollowTaskCleaner.class); @@ -45,7 +45,6 @@ public class ShardFollowTaskCleaner implements ClusterStateListener { */ private final Set completing = Collections.synchronizedSet(new HashSet<>()); - @SuppressWarnings("this-escape") public ShardFollowTaskCleaner(final ClusterService clusterService, final ThreadPool threadPool, final Client client) { this.threadPool = threadPool; this.client = client; diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index f53e7bb562122..b73aab1dbfd55 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -88,7 +88,7 @@ import static org.elasticsearch.xpack.ccr.CcrLicenseChecker.wrapClient; import static org.elasticsearch.xpack.ccr.action.TransportResumeFollowAction.extractLeaderShardHistoryUUIDs; -public class ShardFollowTasksExecutor extends PersistentTasksExecutor { +public final class ShardFollowTasksExecutor extends PersistentTasksExecutor { private static final Logger logger = LogManager.getLogger(ShardFollowTasksExecutor.class); @@ -100,7 +100,6 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor imp // is null, then no trial has been exercised. We keep the version to leave open the possibility that we // may eventually allow a cluster to exercise a trial every time they upgrade to a new major version. 
@Nullable - private Version trialVersion; + private TrialLicenseVersion trialLicenseVersion; - public LicensesMetadata(License license, Version trialVersion) { + public LicensesMetadata(License license, TrialLicenseVersion trialLicenseVersion) { this.license = license; - this.trialVersion = trialVersion; + this.trialLicenseVersion = trialLicenseVersion; } public License getLicense() { @@ -69,19 +69,19 @@ public License getLicense() { } boolean isEligibleForTrial() { - if (trialVersion == null) { + if (trialLicenseVersion == null) { return true; } - return Version.CURRENT.major > trialVersion.major; + return trialLicenseVersion.ableToStartNewTrialSince(TrialLicenseVersion.CURRENT); } - Version getMostRecentTrialVersion() { - return trialVersion; + TrialLicenseVersion getMostRecentTrialVersion() { + return trialLicenseVersion; } @Override public String toString() { - return "LicensesMetadata{" + "license=" + license + ", trialVersion=" + trialVersion + '}'; + return "LicensesMetadata{" + "license=" + license + ", trialVersion=" + trialLicenseVersion + '}'; } @Override @@ -91,13 +91,13 @@ public boolean equals(Object o) { LicensesMetadata that = (LicensesMetadata) o; - return Objects.equals(license, that.license) && Objects.equals(trialVersion, that.trialVersion); + return Objects.equals(license, that.license) && Objects.equals(trialLicenseVersion, that.trialLicenseVersion); } @Override public int hashCode() { int result = license != null ? license.hashCode() : 0; - result = 31 * result + (trialVersion != null ? trialVersion.hashCode() : 0); + result = 31 * result + (trialLicenseVersion != null ? 
trialLicenseVersion.hashCode() : 0); return result; } @@ -118,7 +118,7 @@ public EnumSet context() { public static LicensesMetadata fromXContent(XContentParser parser) throws IOException { License license = LICENSE_TOMBSTONE; - Version trialLicense = null; + TrialLicenseVersion trialLicense = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -133,7 +133,7 @@ public static LicensesMetadata fromXContent(XContentParser parser) throws IOExce } } else if (fieldName.equals(Fields.TRIAL_LICENSE)) { parser.nextToken(); - trialLicense = Version.fromString(parser.text()); + trialLicense = TrialLicenseVersion.fromXContent(parser.text()); } } } @@ -151,8 +151,8 @@ public Iterator toXContentChunked(ToXContent.Params ignore license.toInnerXContent(builder, params); builder.endObject(); } - if (trialVersion != null) { - builder.field(Fields.TRIAL_LICENSE, trialVersion.toString()); + if (trialLicenseVersion != null) { + builder.field(Fields.TRIAL_LICENSE, trialLicenseVersion.toString()); } return builder; })); @@ -166,11 +166,11 @@ public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeBoolean(true); // has a license license.writeTo(streamOutput); } - if (trialVersion == null) { + if (trialLicenseVersion == null) { streamOutput.writeBoolean(false); } else { streamOutput.writeBoolean(true); - Version.writeVersion(trialVersion, streamOutput); + trialLicenseVersion.writeTo(streamOutput); } } @@ -182,7 +182,7 @@ public LicensesMetadata(StreamInput streamInput) throws IOException { } boolean hasExercisedTrial = streamInput.readBoolean(); if (hasExercisedTrial) { - this.trialVersion = Version.readVersion(streamInput); + this.trialLicenseVersion = new TrialLicenseVersion(streamInput); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartBasicClusterTask.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartBasicClusterTask.java index adc0d66353608..1953a31c452ab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartBasicClusterTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartBasicClusterTask.java @@ -7,7 +7,6 @@ package org.elasticsearch.license; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; @@ -15,6 +14,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.core.Nullable; +import org.elasticsearch.license.internal.TrialLicenseVersion; import org.elasticsearch.xpack.core.XPackPlugin; import java.time.Clock; @@ -78,7 +78,7 @@ public LicensesMetadata execute( return currentLicensesMetadata; } } - Version trialVersion = currentLicensesMetadata != null ? currentLicensesMetadata.getMostRecentTrialVersion() : null; + TrialLicenseVersion trialVersion = currentLicensesMetadata != null ? 
currentLicensesMetadata.getMostRecentTrialVersion() : null; updatedLicensesMetadata = new LicensesMetadata(selfGeneratedLicense, trialVersion); } else { updatedLicensesMetadata = currentLicensesMetadata; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java index 814634fc046ee..02b4bc15eaaee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java @@ -7,7 +7,6 @@ package org.elasticsearch.license; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; @@ -15,6 +14,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.core.Nullable; +import org.elasticsearch.license.internal.TrialLicenseVersion; import org.elasticsearch.xpack.core.XPackPlugin; import java.time.Clock; @@ -62,6 +62,15 @@ private LicensesMetadata execute( ClusterStateTaskExecutor.TaskContext taskContext ) { assert taskContext.getTask() == this; + if (discoveryNodes.getMaxNodeVersion().after(discoveryNodes.getSmallestNonClientNodeVersion())) { + throw new IllegalStateException( + "Please ensure all nodes are on the same version before starting your trial, the highest node version in this cluster is [" + + discoveryNodes.getMaxNodeVersion() + + "] and the lowest node version is [" + + discoveryNodes.getMinNodeVersion() + + "]" + ); + } final var listener = ActionListener.runBefore(this.listener, () -> { logger.debug("started self generated trial license: {}", currentLicensesMetadata); }); @@ -88,7 +97,7 @@ private LicensesMetadata execute( 
specBuilder.maxNodes(LicenseSettings.SELF_GENERATED_LICENSE_MAX_NODES); } License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder, discoveryNodes); - LicensesMetadata newLicensesMetadata = new LicensesMetadata(selfGeneratedLicense, Version.CURRENT); + LicensesMetadata newLicensesMetadata = new LicensesMetadata(selfGeneratedLicense, TrialLicenseVersion.CURRENT); taskContext.success(() -> listener.onResponse(new PostStartTrialResponse(PostStartTrialResponse.Status.UPGRADED_TO_TRIAL))); return newLicensesMetadata; } else { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java index a6d762f61e951..518b45dd027ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java @@ -9,13 +9,13 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.license.internal.TrialLicenseVersion; import org.elasticsearch.xpack.core.XPackPlugin; import java.time.Clock; @@ -87,7 +87,7 @@ private ClusterState updateLicenseSignature(ClusterState currentState, LicensesM .type(type) .expiryDate(expiryDate); License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder, currentState.nodes()); - Version trialVersion = currentLicenseMetadata.getMostRecentTrialVersion(); + TrialLicenseVersion trialVersion = 
currentLicenseMetadata.getMostRecentTrialVersion(); LicensesMetadata newLicenseMetadata = new LicensesMetadata(selfGeneratedLicense, trialVersion); mdBuilder.putCustom(LicensesMetadata.TYPE, newLicenseMetadata); logger.info( @@ -129,7 +129,7 @@ private LicensesMetadata createBasicLicenseFromExistingLicense(LicensesMetadata .type(License.LicenseType.BASIC) .expiryDate(LicenseSettings.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS); License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder, currentLicense.version()); - Version trialVersion = currentLicenseMetadata.getMostRecentTrialVersion(); + TrialLicenseVersion trialVersion = currentLicenseMetadata.getMostRecentTrialVersion(); return new LicensesMetadata(selfGeneratedLicense, trialVersion); } @@ -152,7 +152,7 @@ private ClusterState updateWithLicense(ClusterState currentState, License.Licens License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder, currentState.nodes()); LicensesMetadata licensesMetadata; if (License.LicenseType.TRIAL.equals(type)) { - licensesMetadata = new LicensesMetadata(selfGeneratedLicense, Version.CURRENT); + licensesMetadata = new LicensesMetadata(selfGeneratedLicense, TrialLicenseVersion.CURRENT); } else { licensesMetadata = new LicensesMetadata(selfGeneratedLicense, null); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java index 1d3b4a0698ad5..27f7cbff2e3ec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java @@ -55,6 +55,15 @@ protected void masterOperation( ClusterState state, ActionListener listener ) throws Exception { + if (state.nodes().getMaxNodeVersion().after(state.nodes().getSmallestNonClientNodeVersion())) { + throw new IllegalStateException( + 
"Please ensure all nodes are on the same version before starting your trial, the highest node version in this cluster is [" + + state.nodes().getMaxNodeVersion() + + "] and the lowest node version is [" + + state.nodes().getMinNodeVersion() + + "]" + ); + } licenseService.startTrialLicense(request, listener); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/internal/TrialLicenseVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/internal/TrialLicenseVersion.java new file mode 100644 index 0000000000000..6de9fec098a78 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/internal/TrialLicenseVersion.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.license.internal; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Sometimes we release a version with a bunch of cool new features, and we want people to be able to start a new trial license in a cluster + * that's already used a trial and let it expire. This class controls when we do that. The serialization of this class is designed to + * maintain compatibility with old-school Elasticsearch versions (specifically the {@link org.elasticsearch.Version} class). 
+ */ +public class TrialLicenseVersion implements ToXContentFragment, Writeable { + + // This was the highest version at the time we cut over to having a specific version for the trial license, rather than reusing the + // generic Elasticsearch version. While it's derived from the Elasticsearch version formula for BWC, it is independent of it going + // forward. When we want users to be able to start a new trial, increment this number. + // Pkg-private for testing only. + static final int TRIAL_VERSION_CUTOVER = 8_12_00_99; + public static final TrialLicenseVersion CURRENT = new TrialLicenseVersion(TRIAL_VERSION_CUTOVER); + + // The most recently released major version when we cut over. Here for maintaining BWC behavior. + static final int TRIAL_VERSION_CUTOVER_MAJOR = 8; + + private final int trialVersion; + + public TrialLicenseVersion(int trialVersion) { + this.trialVersion = trialVersion; + } + + public TrialLicenseVersion(StreamInput in) throws IOException { + this.trialVersion = in.readVInt(); + } + + public static TrialLicenseVersion fromXContent(String from) { + try { + return new TrialLicenseVersion(Integer.parseInt(from)); + } catch (NumberFormatException ex) { + return new TrialLicenseVersion(parseVersionString(from)); + } + } + + // copied from Version and simplified, for backwards compatibility parsing old version strings in LicensesMetadata XContent + private static int parseVersionString(String version) { + final boolean snapshot = version.endsWith("-SNAPSHOT"); // this is some BWC for 2.x and before indices + if (snapshot) { + version = version.substring(0, version.length() - 9); + } + String[] parts = version.split("[.-]"); + if (parts.length != 3) { + throw new IllegalArgumentException("unable to parse trial license version: " + version); + } + + try { + final int rawMajor = Integer.parseInt(parts[0]); + // we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo + final int major = rawMajor * 1000000; 
+ final int minor = Integer.parseInt(parts[1]) * 10000; + final int revision = Integer.parseInt(parts[2]) * 100; + + // 99 is leftover from alpha/beta/rc, it should be removed + return major + minor + revision + 99; + + } catch (NumberFormatException e) { + throw new IllegalArgumentException("unable to parse trial license version: " + version, e); + } + } + + int asInt() { + return trialVersion; + } + + public boolean ableToStartNewTrialSince(TrialLicenseVersion since) { + if (since.asInt() < TRIAL_VERSION_CUTOVER) { + int sinceMajorVersion = since.asInt() / 1_000_000; // integer division is intentional + return sinceMajorVersion < TRIAL_VERSION_CUTOVER_MAJOR; + } + return since.asInt() < trialVersion; + } + + @Override + public String toString() { + return Integer.toString(trialVersion); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.value(trialVersion); // suffix added for BWC + } + + // pkg-private for testing + String asVersionString() { + return this + ".0.0"; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TrialLicenseVersion that = (TrialLicenseVersion) o; + return trialVersion == that.trialVersion; + } + + @Override + public int hashCode() { + return Objects.hash(trialVersion); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(trialVersion); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersResponse.java index b9a5115a9fa1a..ee2b81049b4d5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersResponse.java @@ -7,7 
+7,6 @@ package org.elasticsearch.xpack.cluster.action; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -63,15 +62,9 @@ public MigrateToDataTiersResponse(StreamInput in) throws IOException { migratedPolicies = in.readStringCollectionAsList(); migratedIndices = in.readStringCollectionAsList(); dryRun = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_17_0)) { - migratedLegacyTemplates = in.readStringCollectionAsList(); - migratedComposableTemplates = in.readStringCollectionAsList(); - migratedComponentTemplates = in.readStringCollectionAsList(); - } else { - migratedLegacyTemplates = List.of(); - migratedComposableTemplates = List.of(); - migratedComponentTemplates = List.of(); - } + migratedLegacyTemplates = in.readStringCollectionAsList(); + migratedComposableTemplates = in.readStringCollectionAsList(); + migratedComponentTemplates = in.readStringCollectionAsList(); } @Override @@ -154,11 +147,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(migratedPolicies); out.writeStringCollection(migratedIndices); out.writeBoolean(dryRun); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_17_0)) { - out.writeStringCollection(migratedLegacyTemplates); - out.writeStringCollection(migratedComposableTemplates); - out.writeStringCollection(migratedComponentTemplates); - } + out.writeStringCollection(migratedLegacyTemplates); + out.writeStringCollection(migratedComposableTemplates); + out.writeStringCollection(migratedComponentTemplates); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java index 51818e7d0d08a..945084395448a 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java @@ -28,7 +28,7 @@ * is still running and AsyncTaskIndexService if task results already stored there. */ public class AsyncResultsService> { - private final Logger logger = LogManager.getLogger(AsyncResultsService.class); + private static final Logger logger = LogManager.getLogger(AsyncResultsService.class); private final Class asyncTaskClass; private final TaskManager taskManager; private final ClusterService clusterService; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/DeleteAsyncResultsService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/DeleteAsyncResultsService.java index 0aa8b55cd0615..1eb271143a3b3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/DeleteAsyncResultsService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/DeleteAsyncResultsService.java @@ -28,7 +28,7 @@ * is still running and AsyncTaskIndexService if task results already stored there. 
*/ public class DeleteAsyncResultsService { - private final Logger logger = LogManager.getLogger(DeleteAsyncResultsService.class); + private static final Logger logger = LogManager.getLogger(DeleteAsyncResultsService.class); private final TaskManager taskManager; private final AsyncTaskIndexService> store; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index 0bc3e83df77ea..fe5a224f85e00 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -40,7 +40,7 @@ private PutFollowAction() { super(NAME, PutFollowAction.Response::new); } - public static class Request extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { + public static final class Request extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { private static final ParseField REMOTE_CLUSTER_FIELD = new ParseField("remote_cluster"); private static final ParseField LEADER_INDEX_FIELD = new ParseField("leader_index"); @@ -188,7 +188,6 @@ public IndicesOptions indicesOptions() { return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); } - @SuppressWarnings("this-escape") public Request(StreamInput in) throws IOException { super(in); this.remoteCluster = in.readString(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java index 04a04b5ef4f41..311f3484900f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ilm; -import 
org.elasticsearch.TransportVersions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; @@ -123,7 +122,7 @@ public AllocateAction( public AllocateAction(StreamInput in) throws IOException { this( in.readOptionalVInt(), - in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0) ? in.readOptionalInt() : null, + in.readOptionalInt(), (Map) in.readGenericValue(), (Map) in.readGenericValue(), (Map) in.readGenericValue() @@ -153,9 +152,7 @@ public Map getRequire() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(numberOfReplicas); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - out.writeOptionalInt(totalShardsPerNode); - } + out.writeOptionalInt(totalShardsPerNode); out.writeGenericValue(include); out.writeGenericValue(exclude); out.writeGenericValue(require); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicy.java index 540a31b0116b0..322f64405ca1f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicy.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicy.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ilm; +import org.elasticsearch.TransportVersions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.common.Strings; @@ -44,6 +45,7 @@ public class LifecyclePolicy implements SimpleDiffable, ToXCont public static final ParseField PHASES_FIELD = new ParseField("phases"); private static final ParseField METADATA = new ParseField("_meta"); + private static final ParseField DEPRECATED = new ParseField("deprecated"); private static final StepKey NEW_STEP_KEY = new 
StepKey("new", PhaseCompleteStep.NAME, PhaseCompleteStep.NAME); @@ -54,7 +56,7 @@ public class LifecyclePolicy implements SimpleDiffable, ToXCont (a, name) -> { List phases = (List) a[0]; Map phaseMap = phases.stream().collect(Collectors.toMap(Phase::getName, Function.identity())); - return new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, name, phaseMap, (Map) a[1]); + return new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, name, phaseMap, (Map) a[1], (Boolean) a[2]); } ); static { @@ -62,6 +64,7 @@ public class LifecyclePolicy implements SimpleDiffable, ToXCont throw new IllegalArgumentException("ordered " + PHASES_FIELD.getPreferredName() + " are not supported"); }, PHASES_FIELD); PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.map(), METADATA); + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), DEPRECATED); } private final String name; @@ -69,6 +72,8 @@ public class LifecyclePolicy implements SimpleDiffable, ToXCont private final Map phases; @Nullable private final Map metadata; + @Nullable + private final Boolean deprecated; /** * @param name @@ -79,10 +84,12 @@ public class LifecyclePolicy implements SimpleDiffable, ToXCont * */ public LifecyclePolicy(String name, Map phases) { - this(TimeseriesLifecycleType.INSTANCE, name, phases, null); + this(TimeseriesLifecycleType.INSTANCE, name, phases, null, null); } /** + * @param type + * the {@link LifecycleType} of the policy * @param name * the name of this {@link LifecyclePolicy} * @param phases @@ -91,8 +98,8 @@ public LifecyclePolicy(String name, Map phases) { * @param metadata * the custom metadata of this {@link LifecyclePolicy} */ - public LifecyclePolicy(String name, Map phases, @Nullable Map metadata) { - this(TimeseriesLifecycleType.INSTANCE, name, phases, metadata); + public LifecyclePolicy(LifecycleType type, String name, Map phases, @Nullable Map metadata) { + this(type, name, phases, metadata, null); } /** @@ -103,6 +110,11 @@ public 
LifecyclePolicy(StreamInput in) throws IOException { name = in.readString(); phases = in.readImmutableMap(Phase::new); this.metadata = in.readMap(); + if (in.getTransportVersion().onOrAfter(TransportVersions.DEPRECATED_COMPONENT_TEMPLATES_ADDED)) { + this.deprecated = in.readOptionalBoolean(); + } else { + this.deprecated = null; + } } /** @@ -116,11 +128,18 @@ public LifecyclePolicy(StreamInput in) throws IOException { * @param metadata * the custom metadata of this {@link LifecyclePolicy} */ - public LifecyclePolicy(LifecycleType type, String name, Map phases, @Nullable Map metadata) { + public LifecyclePolicy( + LifecycleType type, + String name, + Map phases, + @Nullable Map metadata, + @Nullable Boolean deprecated + ) { this.name = name; this.phases = phases; this.type = type; this.metadata = metadata; + this.deprecated = deprecated; } public void validate() { @@ -137,6 +156,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeMap(phases, StreamOutput::writeWriteable); out.writeGenericMap(this.metadata); + if (out.getTransportVersion().onOrAfter(TransportVersions.DEPRECATED_COMPONENT_TEMPLATES_ADDED)) { + out.writeOptionalBoolean(deprecated); + } } /** @@ -168,6 +190,14 @@ public Map getMetadata() { return metadata; } + public Boolean getDeprecated() { + return deprecated; + } + + public boolean isDeprecated() { + return Boolean.TRUE.equals(deprecated); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -179,6 +209,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (this.metadata != null) { builder.field(METADATA.getPreferredName(), this.metadata); } + if (this.deprecated != null) { + builder.field(DEPRECATED.getPreferredName(), this.deprecated); + } builder.endObject(); return builder; } @@ -310,7 +343,7 @@ public static void validatePolicyName(String policy) { @Override public int hashCode() { 
- return Objects.hash(name, phases, metadata); + return Objects.hash(name, phases, metadata, deprecated); } @Override @@ -322,7 +355,10 @@ public boolean equals(Object obj) { return false; } LifecyclePolicy other = (LifecyclePolicy) obj; - return Objects.equals(name, other.name) && Objects.equals(phases, other.phases) && Objects.equals(metadata, other.metadata); + return Objects.equals(name, other.name) + && Objects.equals(phases, other.phases) + && Objects.equals(metadata, other.metadata) + && Objects.equals(deprecated, other.deprecated); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java index e96836ab6a5fa..1388fb3e3db8f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java @@ -31,7 +31,7 @@ private DeleteDataFrameAnalyticsAction() { super(NAME, AcknowledgedResponse::readFrom); } - public static class Request extends AcknowledgedRequest { + public static final class Request extends AcknowledgedRequest { public static final ParseField FORCE = new ParseField("force"); public static final ParseField TIMEOUT = new ParseField("timeout"); @@ -48,7 +48,6 @@ public Request(StreamInput in) throws IOException { force = in.readBoolean(); } - @SuppressWarnings("this-escape") public Request() { timeout(DEFAULT_TIMEOUT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java index 599d992819427..c8c8e211fb1e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java @@ -27,16 +27,14 @@ private GetDataFrameAnalyticsAction() { super(NAME, Response::new); } - public static class Request extends AbstractGetResourcesRequest { + public static final class Request extends AbstractGetResourcesRequest { public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - @SuppressWarnings("this-escape") public Request() { setAllowNoResources(true); } - @SuppressWarnings("this-escape") public Request(String id) { setResourceId(id); setAllowNoResources(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java index 9637ba3c0f92e..4ea73d1b7a6ee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java @@ -37,7 +37,7 @@ private GetDatafeedsAction() { super(NAME, Response::new); } - public static class Request extends MasterNodeReadRequest { + public static final class Request extends MasterNodeReadRequest { public static final String ALLOW_NO_MATCH = "allow_no_match"; @@ -49,7 +49,6 @@ public Request(String datafeedId) { this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); } - @SuppressWarnings("this-escape") public Request() { local(true); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java index f37264cc6bc9b..dd838d368ee04 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java @@ -27,14 +27,12 @@ private GetFiltersAction() { super(NAME, Response::new); } - public static class Request extends AbstractGetResourcesRequest { + public static final class Request extends AbstractGetResourcesRequest { - @SuppressWarnings("this-escape") public Request() { setAllowNoResources(true); } - @SuppressWarnings("this-escape") public Request(String filterId) { setResourceId(filterId); setAllowNoResources(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java index 41358dc34f40b..490c4dd99fcb0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java @@ -35,7 +35,7 @@ private GetJobsAction() { super(NAME, Response::new); } - public static class Request extends MasterNodeReadRequest { + public static final class Request extends MasterNodeReadRequest { public static final String ALLOW_NO_MATCH = "allow_no_match"; @@ -47,7 +47,6 @@ public Request(String jobId) { this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); } - @SuppressWarnings("this-escape") public Request() { local(true); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java index 3d152048563c4..0c5fbbc065e29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java @@ -123,7 +123,7 @@ public int hashCode() { } } - public static class Request extends 
AbstractGetResourcesRequest { + public static final class Request extends AbstractGetResourcesRequest { public static final ParseField INCLUDE = new ParseField("include"); public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); @@ -136,7 +136,6 @@ public Request(String id) { this(id, null, null); } - @SuppressWarnings("this-escape") public Request(String id, List tags, Set includes) { setResourceId(id); setAllowNoResources(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java index cd1b5674dcb7b..cc91daf966ee5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java @@ -52,16 +52,14 @@ private GetTrainedModelsStatsAction() { super(NAME, GetTrainedModelsStatsAction.Response::new); } - public static class Request extends AbstractGetResourcesRequest { + public static final class Request extends AbstractGetResourcesRequest { public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - @SuppressWarnings("this-escape") public Request() { setAllowNoResources(true); } - @SuppressWarnings("this-escape") public Request(String id) { setResourceId(id); setAllowNoResources(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java index 48a793155e542..d1c82635a83c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java @@ -45,7 +45,7 @@ 
private StopDataFrameAnalyticsAction() { super(NAME, StopDataFrameAnalyticsAction.Response::new); } - public static class Request extends BaseTasksRequest implements ToXContentObject { + public static final class Request extends BaseTasksRequest implements ToXContentObject { public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); public static final ParseField FORCE = new ParseField("force"); @@ -90,12 +90,11 @@ public Request(StreamInput in) throws IOException { expandedIds = new HashSet<>(Arrays.asList(in.readStringArray())); } - @SuppressWarnings("this-escape") public Request() { setTimeout(DEFAULT_TIMEOUT); } - public final Request setId(String id) { + public Request setId(String id) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameAnalyticsConfig.ID); return this; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Classification.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Classification.java index 956c4713adb35..b433e2c89410f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Classification.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Classification.java @@ -31,7 +31,7 @@ /** * Evaluation of classification results. 
*/ -public class Classification implements Evaluation { +public final class Classification implements Evaluation { public static final ParseField NAME = new ParseField("classification"); @@ -75,7 +75,6 @@ public static Classification fromXContent(XContentParser parser) { */ private final List metrics; - @SuppressWarnings("this-escape") public Classification( String actualField, @Nullable String predictedField, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java index 84b82bde909a0..6ca9c20fda14a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java @@ -32,7 +32,7 @@ /** * Evaluation of outlier detection results. 
*/ -public class OutlierDetection implements Evaluation { +public final class OutlierDetection implements Evaluation { public static final ParseField NAME = new ParseField("outlier_detection", "binary_soft_classification"); @@ -75,7 +75,6 @@ public static QueryBuilder actualIsTrueQuery(String actualField) { */ private final List metrics; - @SuppressWarnings("this-escape") public OutlierDetection(String actualField, String predictedProbabilityField, @Nullable List metrics) { this.fields = new EvaluationFields( ExceptionsHelper.requireNonNull(actualField, ACTUAL_FIELD), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java index 4b1e444f32224..1add514355e30 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java @@ -30,7 +30,7 @@ /** * Evaluation of regression results. 
*/ -public class Regression implements Evaluation { +public final class Regression implements Evaluation { public static final ParseField NAME = new ParseField("regression"); @@ -69,7 +69,6 @@ public static Regression fromXContent(XContentParser parser) { */ private final List metrics; - @SuppressWarnings("this-escape") public Regression(String actualField, String predictedField, @Nullable List metrics) { this.fields = new EvaluationFields( ExceptionsHelper.requireNonNull(actualField, ACTUAL_FIELD), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java index e92e6e9b99119..f69be31939b32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java @@ -40,7 +40,7 @@ /** * Trained model assignment object that contains assignment options and the assignment routing table */ -public class TrainedModelAssignment implements SimpleDiffable, ToXContentObject { +public final class TrainedModelAssignment implements SimpleDiffable, ToXContentObject { private static final ParseField REASON = new ParseField("reason"); private static final ParseField ASSIGNMENT_STATE = new ParseField("assignment_state"); @@ -137,7 +137,6 @@ private TrainedModelAssignment( : Math.max(maxAssignedAllocations, totalCurrentAllocations()); } - @SuppressWarnings("this-escape") public TrainedModelAssignment(StreamInput in) throws IOException { this.taskParams = new StartTrainedModelDeploymentAction.TaskParams(in); this.nodeRoutingTable = in.readOrderedMap(StreamInput::readString, RoutingInfo::new); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java index b472c6ef32163..966db6e785c5c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java @@ -295,14 +295,13 @@ public TransportVersion getMinimalCompatibilityVersion() { return TransportVersions.V_7_6_0; } - public static class Builder { + public static final class Builder { private List featureNames; private ArrayList nodes; private int numNodes; private TargetType targetType = TargetType.REGRESSION; private List classificationLabels; - @SuppressWarnings("this-escape") public Builder() { nodes = new ArrayList<>(); // allocate space in the root node and set to a leaf diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java index 21b572907d037..51d2ac41cc157 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java @@ -448,7 +448,7 @@ public int hashCode() { ); } - public static class Builder { + public static final class Builder { public static final TimeValue DEFAULT_BUCKET_SPAN = TimeValue.timeValueMinutes(5); @@ -464,7 +464,6 @@ public static class Builder { private Boolean multivariateByFields; private TimeValue modelPruneWindow; - @SuppressWarnings("this-escape") public Builder(List detectors) { setDetectors(detectors); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java index 827b25f39f23f..775640ac2048f 
100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java @@ -35,7 +35,7 @@ * so the field is visible. */ -public class DataCounts implements ToXContentObject, Writeable { +public final class DataCounts implements ToXContentObject, Writeable { private static final String DOCUMENT_SUFFIX = "_data_counts"; @@ -161,7 +161,6 @@ public static String v54DocumentId(String jobId) { private Date latestSparseBucketTimeStamp; private Instant logTime; - @SuppressWarnings("this-escape") public DataCounts( String jobId, long processedRecordCount, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ForecastRequestStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ForecastRequestStats.java index 3cccb0006d658..618264c610516 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ForecastRequestStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ForecastRequestStats.java @@ -30,7 +30,7 @@ * information about errors, progress and counters. There is exactly 1 document * per forecast request, getting updated while the request is processed. 
*/ -public class ForecastRequestStats implements ToXContentObject, Writeable { +public final class ForecastRequestStats implements ToXContentObject, Writeable { /** * Result type */ @@ -147,7 +147,6 @@ public ForecastRequestStats(ForecastRequestStats forecastRequestStats) { this.status = forecastRequestStats.status; } - @SuppressWarnings("this-escape") public ForecastRequestStats(StreamInput in) throws IOException { jobId = in.readString(); forecastId = in.readString(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java index 250efe349d504..e1644e5113a27 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java @@ -28,13 +28,8 @@ public class SearchableSnapshotFeatureSetUsage extends XPackFeatureSet.Usage { public SearchableSnapshotFeatureSetUsage(StreamInput input) throws IOException { super(input); numberOfSearchableSnapshotIndices = input.readVInt(); - if (input.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - numberOfFullCopySearchableSnapshotIndices = input.readVInt(); - numberOfSharedCacheSearchableSnapshotIndices = input.readVInt(); - } else { - numberOfFullCopySearchableSnapshotIndices = 0; - numberOfSharedCacheSearchableSnapshotIndices = 0; - } + numberOfFullCopySearchableSnapshotIndices = input.readVInt(); + numberOfSharedCacheSearchableSnapshotIndices = input.readVInt(); } @Override @@ -46,10 +41,8 @@ public TransportVersion getMinimalSupportedVersion() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(numberOfSearchableSnapshotIndices); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - out.writeVInt(numberOfFullCopySearchableSnapshotIndices); - out.writeVInt(numberOfSharedCacheSearchableSnapshotIndices); - } + out.writeVInt(numberOfFullCopySearchableSnapshotIndices); + out.writeVInt(numberOfSharedCacheSearchableSnapshotIndices); } public SearchableSnapshotFeatureSetUsage( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java index ec99301b92357..1177ff68c34c4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java @@ -102,7 +102,7 @@ public final class DocumentSubsetBitsetCache implements IndexReader.ClosedListen private static final BitSet NULL_MARKER = new FixedBitSet(0); - private final Logger logger; + private static final Logger logger = LogManager.getLogger(DocumentSubsetBitsetCache.class); /** * When a {@link BitSet} is evicted from {@link #bitsetCache}, we need to also remove it from {@link #keysByIndex}. @@ -131,8 +131,6 @@ public DocumentSubsetBitsetCache(Settings settings, ThreadPool threadPool) { * it is sometimes necessary to run an asynchronous task to synchronize the internal state. 
*/ protected DocumentSubsetBitsetCache(Settings settings, ExecutorService cleanupExecutor) { - this.logger = LogManager.getLogger(getClass()); - final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock(); this.cacheEvictionLock = new ReleasableLock(readWriteLock.writeLock()); this.cacheModificationLock = new ReleasableLock(readWriteLock.readLock()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index f32e6709851de..c85a648761ca7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -34,7 +34,7 @@ public final class ApplicationPermission { public static final ApplicationPermission NONE = new ApplicationPermission(Collections.emptyList()); - private final Logger logger; + private static final Logger logger = LogManager.getLogger(ApplicationPermission.class); private final List permissions; /** @@ -43,7 +43,6 @@ public final class ApplicationPermission { * applied. The resources are treated as a wildcard {@link Automatons#pattern}. 
*/ ApplicationPermission(List>> privilegesAndResources) { - this.logger = LogManager.getLogger(getClass()); Map permissionsByPrivilege = new HashMap<>(); privilegesAndResources.forEach(tup -> permissionsByPrivilege.compute(tup.v1(), (appPriv, existing) -> { final Set resourceNames = tup.v2(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java index 20ff6f4c4ad17..f8dccd244fea9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java @@ -14,7 +14,7 @@ /** * A {@link NamedClusterPrivilege} that can be used to define an access to cluster level actions. */ -public class ActionClusterPrivilege implements NamedClusterPrivilege { +public final class ActionClusterPrivilege implements NamedClusterPrivilege { private final String name; private final Set allowedActionPatterns; private final Set excludedActionPatterns; @@ -39,7 +39,6 @@ public ActionClusterPrivilege(final String name, final Set allowedAction * @param allowedActionPatterns a set of cluster action patterns * @param excludedActionPatterns a set of cluster action patterns */ - @SuppressWarnings("this-escape") public ActionClusterPrivilege(final String name, final Set allowedActionPatterns, final Set excludedActionPatterns) { this.name = name; this.allowedActionPatterns = allowedActionPatterns; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java index 438c457d84305..cb55de79342b8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java @@ -33,14 +33,13 @@ /** * A configuration loader for SSL Settings */ -public class SslSettingsLoader extends SslConfigurationLoader { +public final class SslSettingsLoader extends SslConfigurationLoader { private final Settings settings; private final Map> secureSettings; private final Map> standardSettings; private final Map> disabledSettings; - @SuppressWarnings("this-escape") public SslSettingsLoader(Settings settings, String settingPrefix, boolean acceptNonSecurePasswords) { super(settingPrefix); this.settings = settings; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java index 46850e5f4d067..95edd3fc3bb92 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java @@ -30,7 +30,7 @@ /** * A request to gather terms for a given field matching a string prefix */ -public class TermsEnumRequest extends BroadcastRequest implements ToXContentObject { +public final class TermsEnumRequest extends BroadcastRequest implements ToXContentObject { public static final IndicesOptions DEFAULT_INDICES_OPTIONS = SearchRequest.DEFAULT_INDICES_OPTIONS; public static int DEFAULT_SIZE = 10; @@ -51,14 +51,12 @@ public TermsEnumRequest() { * Constructs a new term enum request against the provided indices. No indices provided means it will * run against all indices. */ - @SuppressWarnings("this-escape") public TermsEnumRequest(String... 
indices) { super(indices); indicesOptions(DEFAULT_INDICES_OPTIONS); timeout(DEFAULT_TIMEOUT); } - @SuppressWarnings("this-escape") public TermsEnumRequest(TermsEnumRequest clone) { this.field = clone.field; this.string = clone.string; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/structurefinder/TextStructure.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/structurefinder/TextStructure.java index 1a59f373d75f3..b39fc27b7a148 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/structurefinder/TextStructure.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/structurefinder/TextStructure.java @@ -523,7 +523,7 @@ public boolean equals(Object other) { && Objects.equals(this.explanation, that.explanation); } - public static class Builder { + public static final class Builder { private int numLinesAnalyzed; private int numMessagesAnalyzed; @@ -553,7 +553,6 @@ public Builder() { this(Format.SEMI_STRUCTURED_TEXT); } - @SuppressWarnings("this-escape") public Builder(Format format) { setFormat(format); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java index 5ff153db7467d..46e844f93695e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java @@ -49,7 +49,7 @@ public GetTransformStatsAction() { super(NAME, GetTransformStatsAction.Response::new); } - public static class Request extends BaseTasksRequest { + public static final class Request extends BaseTasksRequest { private final String id; private PageParams pageParams = PageParams.defaultParams(); private boolean 
allowNoMatch = true; @@ -58,7 +58,6 @@ public static class Request extends BaseTasksRequest { // used internally to expand the queried id expression private List expandedIds; - @SuppressWarnings("this-escape") public Request(String id, @Nullable TimeValue timeout) { setTimeout(timeout); if (Strings.isNullOrEmpty(id) || id.equals("*")) { @@ -96,11 +95,11 @@ public void setExpandedIds(List expandedIds) { this.expandedIds = List.copyOf(expandedIds); } - public final void setPageParams(PageParams pageParams) { + public void setPageParams(PageParams pageParams) { this.pageParams = Objects.requireNonNull(pageParams); } - public final PageParams getPageParams() { + public PageParams getPageParams() { return pageParams; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java index 57c14d17cce40..f48e06a3f743c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java @@ -37,11 +37,10 @@ private ScheduleNowTransformAction() { super(NAME, ScheduleNowTransformAction.Response::new); } - public static class Request extends BaseTasksRequest { + public static final class Request extends BaseTasksRequest { private final String id; - @SuppressWarnings("this-escape") public Request(String id, TimeValue timeout) { this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.setTimeout(ExceptionsHelper.requireNonNull(timeout, TransformField.TIMEOUT.getPreferredName())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java index 
097ae6bb05a07..794bf009764f3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java @@ -46,7 +46,7 @@ private StopTransformAction() { super(NAME, StopTransformAction.Response::new); } - public static class Request extends BaseTasksRequest { + public static final class Request extends BaseTasksRequest { private final String id; private final boolean waitForCompletion; private final boolean force; @@ -54,7 +54,6 @@ public static class Request extends BaseTasksRequest { private final boolean waitForCheckpoint; private Set expandedIds; - @SuppressWarnings("this-escape") public Request( String id, boolean waitForCompletion, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java index e943d0dd50ac8..f9da4082dbfa2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java @@ -42,7 +42,7 @@ private UpdateTransformAction() { super(NAME, Response::new); } - public static class Request extends BaseTasksRequest { + public static final class Request extends BaseTasksRequest { private final TransformConfigUpdate update; private final String id; @@ -50,7 +50,6 @@ public static class Request extends BaseTasksRequest { private TransformConfig config; private AuthorizationState authState; - @SuppressWarnings("this-escape") public Request(TransformConfigUpdate update, String id, boolean deferValidation, TimeValue timeout) { this.update = update; this.id = id; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java index 6a33a3beaa191..9658cdd74e196 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java @@ -53,7 +53,7 @@ /** * This class holds the configuration details of a data frame transform */ -public class TransformConfig implements SimpleDiffable, Writeable, ToXContentObject { +public final class TransformConfig implements SimpleDiffable, Writeable, ToXContentObject { /** * Version of the last time the config defaults have been changed. @@ -209,7 +209,6 @@ public static String documentId(String transformId) { return NAME + "-" + transformId; } - @SuppressWarnings("this-escape") public TransformConfig( final String id, final SourceConfig source, @@ -245,7 +244,6 @@ public TransformConfig( this.transformVersion = version == null ? 
null : TransformConfigVersion.fromString(version); } - @SuppressWarnings("this-escape") public TransformConfig(final StreamInput in) throws IOException { id = in.readString(); source = new SourceConfig(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java index af13fcbcb7139..05e43a11ba841 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java @@ -30,7 +30,7 @@ /** * This class holds the mutable configuration items for a data frame transform */ -public class TransformConfigUpdate implements Writeable { +public final class TransformConfigUpdate implements Writeable { public static final String NAME = "data_frame_transform_config_update"; @@ -107,7 +107,6 @@ public TransformConfigUpdate( this.retentionPolicyConfig = retentionPolicyConfig; } - @SuppressWarnings("this-escape") public TransformConfigUpdate(final StreamInput in) throws IOException { source = in.readOptionalWriteable(SourceConfig::new); dest = in.readOptionalWriteable(DestConfig::new); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/common/stats/Counters.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/common/stats/Counters.java index fe649a203bcce..c1bda220671b7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/common/stats/Counters.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/common/stats/Counters.java @@ -21,11 +21,10 @@ * Calling toNestedMap() will create a nested map, where each dot of the key name will nest deeper * The main reason for this class is that the stats producer should not be worried about how 
the map is actually nested */ -public class Counters implements Writeable { +public final class Counters implements Writeable { private Map counters = new HashMap<>(); - @SuppressWarnings("this-escape") public Counters(StreamInput in) throws IOException { int numCounters = in.readVInt(); for (int i = 0; i < numCounters; i++) { @@ -33,7 +32,6 @@ public Counters(StreamInput in) throws IOException { } } - @SuppressWarnings("this-escape") public Counters(String... names) { for (String name : names) { set(name); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java index 8d1f521fcd7e2..d681a84933bc1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java @@ -16,13 +16,12 @@ import java.util.HashMap; import java.util.Map; -public class TransformRegistry { +public final class TransformRegistry { private final Map< String, TransformFactory>> factories; - @SuppressWarnings("this-escape") public TransformRegistry( Map>> factories ) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransform.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransform.java index 6d587468ff0dd..f5225e4981e2d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransform.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransform.java @@ -143,11 +143,10 @@ protected XContentBuilder typeXContent(XContentBuilder builder, Params params) t } } - public static class Builder implements Transform.Builder { + public static final class Builder implements Transform.Builder 
{ private final List transforms = new ArrayList<>(); - @SuppressWarnings("this-escape") public Builder(Transform... transforms) { add(transforms); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesMetadataSerializationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesMetadataSerializationTests.java index 564f4171569ad..4ed306bf734fc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesMetadataSerializationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesMetadataSerializationTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.license.internal.TrialLicenseVersion; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -49,7 +50,7 @@ public void testXContentSerializationOneSignedLicense() throws Exception { public void testXContentSerializationOneSignedLicenseWithUsedTrial() throws Exception { License license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(2)); - LicensesMetadata licensesMetadata = new LicensesMetadata(license, Version.CURRENT); + LicensesMetadata licensesMetadata = new LicensesMetadata(license, TrialLicenseVersion.CURRENT); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); builder.startObject("licenses"); @@ -58,12 +59,12 @@ public void testXContentSerializationOneSignedLicenseWithUsedTrial() throws Exce builder.endObject(); LicensesMetadata licensesMetadataFromXContent = getLicensesMetadataFromXContent(createParser(builder)); assertThat(licensesMetadataFromXContent.getLicense(), equalTo(license)); - assertEquals(licensesMetadataFromXContent.getMostRecentTrialVersion(), Version.CURRENT); + 
assertEquals(licensesMetadataFromXContent.getMostRecentTrialVersion(), TrialLicenseVersion.CURRENT); } public void testLicenseMetadataParsingDoesNotSwallowOtherMetadata() throws Exception { License license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(2)); - LicensesMetadata licensesMetadata = new LicensesMetadata(license, Version.CURRENT); + LicensesMetadata licensesMetadata = new LicensesMetadata(license, TrialLicenseVersion.CURRENT); RepositoryMetadata repositoryMetadata = new RepositoryMetadata("repo", "fs", Settings.EMPTY); RepositoriesMetadata repositoriesMetadata = new RepositoriesMetadata(Collections.singletonList(repositoryMetadata)); final Metadata.Builder metadataBuilder = Metadata.builder(); @@ -97,7 +98,7 @@ public void testXContentSerializationOneTrial() throws Exception { .type(randomBoolean() ? "trial" : "basic") .expiryDate(issueDate + TimeValue.timeValueHours(2).getMillis()); final License trialLicense = SelfGeneratedLicense.create(specBuilder, License.VERSION_CURRENT); - LicensesMetadata licensesMetadata = new LicensesMetadata(trialLicense, Version.CURRENT); + LicensesMetadata licensesMetadata = new LicensesMetadata(trialLicense, TrialLicenseVersion.CURRENT); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); builder.startObject("licenses"); @@ -106,7 +107,7 @@ public void testXContentSerializationOneTrial() throws Exception { builder.endObject(); LicensesMetadata licensesMetadataFromXContent = getLicensesMetadataFromXContent(createParser(builder)); assertThat(licensesMetadataFromXContent.getLicense(), equalTo(trialLicense)); - assertEquals(licensesMetadataFromXContent.getMostRecentTrialVersion(), Version.CURRENT); + assertEquals(licensesMetadataFromXContent.getMostRecentTrialVersion(), TrialLicenseVersion.CURRENT); } public void testLicenseTombstoneFromXContext() throws Exception { @@ -130,7 +131,7 @@ public void testLicenseTombstoneWithUsedTrialFromXContext() throws Exception { builder.endObject(); 
LicensesMetadata metadataFromXContent = getLicensesMetadataFromXContent(createParser(builder)); assertThat(metadataFromXContent.getLicense(), equalTo(LicensesMetadata.LICENSE_TOMBSTONE)); - assertEquals(metadataFromXContent.getMostRecentTrialVersion(), Version.CURRENT); + assertEquals(metadataFromXContent.getMostRecentTrialVersion(), TrialLicenseVersion.CURRENT); } private static LicensesMetadata getLicensesMetadataFromXContent(XContentParser parser) throws Exception { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/internal/TrialLicenseVersionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/internal/TrialLicenseVersionTests.java new file mode 100644 index 0000000000000..ff62fbc4d4877 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/internal/TrialLicenseVersionTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.license.internal; + +import org.elasticsearch.Version; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.license.internal.TrialLicenseVersion.TRIAL_VERSION_CUTOVER; +import static org.elasticsearch.license.internal.TrialLicenseVersion.TRIAL_VERSION_CUTOVER_MAJOR; +import static org.hamcrest.Matchers.equalTo; + +public class TrialLicenseVersionTests extends ESTestCase { + + public void testCanParseAllVersions() { + for (var version : Version.getDeclaredVersions(Version.class)) { + TrialLicenseVersion parsedVersion = TrialLicenseVersion.fromXContent(version.toString()); + if (version.major < TRIAL_VERSION_CUTOVER_MAJOR) { + assertTrue(new TrialLicenseVersion(TRIAL_VERSION_CUTOVER).ableToStartNewTrialSince(parsedVersion)); + } else { + assertFalse(new TrialLicenseVersion(TRIAL_VERSION_CUTOVER).ableToStartNewTrialSince(parsedVersion)); + } + } + } + + public void testRoundTripParsing() { + var randomVersion = new TrialLicenseVersion(randomNonNegativeInt()); + assertThat(TrialLicenseVersion.fromXContent(randomVersion.toString()), equalTo(randomVersion)); + } + + public void testNewTrialAllowed() { + var randomVersion = new TrialLicenseVersion(randomNonNegativeInt()); + var subsequentVersion = new TrialLicenseVersion( + randomVersion.asInt() + randomIntBetween(0, Integer.MAX_VALUE - randomVersion.asInt()) + ); + assertFalse(randomVersion.ableToStartNewTrialSince(randomVersion)); + assertTrue(subsequentVersion.ableToStartNewTrialSince(randomVersion)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java index ea1fe24fffea1..79cb0d3250bb2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java @@ -139,7 +139,7 @@ 
public static LifecyclePolicy randomTimeseriesLifecyclePolicyWithAllPhases(@Null } phases.put(phase, new Phase(phase, after, actions)); } - return new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, lifecycleName, phases, randomMeta()); + return new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, lifecycleName, phases, randomMeta(), randomOptionalBoolean()); } public static LifecyclePolicy randomTimeseriesLifecyclePolicy(@Nullable String lifecycleName) { @@ -230,7 +230,7 @@ public static LifecyclePolicy randomTimeseriesLifecyclePolicy(@Nullable String l } else { phases.remove(TimeseriesLifecycleType.FROZEN_PHASE); } - return new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, lifecycleName, phases, randomMeta()); + return new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, lifecycleName, phases, randomMeta(), randomOptionalBoolean()); } private static Function> getPhaseToValidActions() { @@ -276,14 +276,16 @@ public static LifecyclePolicy randomTestLifecyclePolicy(@Nullable String lifecyc String phaseName = randomAlphaOfLength(10); phases.put(phaseName, new Phase(phaseName, after, actions)); } - return new LifecyclePolicy(TestLifecycleType.INSTANCE, lifecycleName, phases, randomMeta()); + return new LifecyclePolicy(TestLifecycleType.INSTANCE, lifecycleName, phases, randomMeta(), randomOptionalBoolean()); } @Override protected LifecyclePolicy mutateInstance(LifecyclePolicy instance) { String name = instance.getName(); Map phases = instance.getPhases(); - switch (between(0, 1)) { + Map metadata = instance.getMetadata(); + Boolean deprecated = instance.getDeprecated(); + switch (between(0, 3)) { case 0 -> name = name + randomAlphaOfLengthBetween(1, 5); case 1 -> { // Remove the frozen phase, because it makes a lot of invalid phases when randomly mutating an existing policy @@ -303,9 +305,11 @@ protected LifecyclePolicy mutateInstance(LifecyclePolicy instance) { phases = new LinkedHashMap<>(phases); phases.put(phaseName, new Phase(phaseName, null, 
Collections.emptyMap())); } + case 2 -> metadata = randomValueOtherThan(metadata, LifecyclePolicyTests::randomMeta); + case 3 -> deprecated = instance.isDeprecated() ? randomFrom(false, null) : true; default -> throw new AssertionError("Illegal randomisation branch"); } - return new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, name, phases, randomMeta()); + return new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, name, phases, metadata, deprecated); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java index ec3fc2b8a88ef..2ac6b633c0f09 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java @@ -313,7 +313,7 @@ private SnapshotInfo makeInfo(long startTime) { meta.put(SnapshotsService.POLICY_ID_METADATA_FIELD, REPO); final int totalShards = between(1, 20); SnapshotInfo snapInfo = new SnapshotInfo( - new Snapshot(REPO, new SnapshotId("snap-" + randomAlphaOfLength(3), "uuid")), + new Snapshot(REPO, new SnapshotId("snap-" + randomUUID(), "uuid")), Collections.singletonList("foo"), Collections.singletonList("bar"), Collections.emptyList(), diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java index dbdc09712eb25..a451439fadea1 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java @@ -13,14 +13,13 @@ import 
org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -public class AggregateMetricFieldValueFetcher extends FieldValueFetcher { +public final class AggregateMetricFieldValueFetcher extends FieldValueFetcher { private final AggregateDoubleMetricFieldType aggMetricFieldType; private final AbstractDownsampleFieldProducer fieldProducer; - @SuppressWarnings("this-escape") - protected AggregateMetricFieldValueFetcher( + AggregateMetricFieldValueFetcher( MappedFieldType fieldType, AggregateDoubleMetricFieldType aggMetricFieldType, IndexFieldData fieldData diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java index d8486bcd930ec..9b2ce393e5b04 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java @@ -14,8 +14,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -56,8 +54,6 @@ public class QueryRule implements Writeable, ToXContentObject { private final List criteria; private final Map actions; - private final Logger logger = LogManager.getLogger(QueryRule.class); - public enum QueryRuleType { PINNED; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java index ad1626fd2f4b3..731408d3c6ef8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java @@ -25,8 +25,6 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -69,8 +67,6 @@ public class RuleQueryBuilder extends AbstractQueryBuilder { private final List pinnedDocs; private final Supplier> pinnedDocsSupplier; - private final Logger logger = LogManager.getLogger(RuleQueryBuilder.class); - @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.V_8_500_040; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationTemplateService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationTemplateService.java index e2be554f05890..bed5cc0cacd6c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationTemplateService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationTemplateService.java @@ -12,8 +12,6 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import 
org.elasticsearch.script.TemplateScript; @@ -35,8 +33,6 @@ public class SearchApplicationTemplateService { private final ScriptService scriptService; private final NamedXContentRegistry xContentRegistry; - private final Logger logger = LogManager.getLogger(SearchApplicationTemplateService.class); - public SearchApplicationTemplateService(ScriptService scriptService, NamedXContentRegistry xContentRegistry) { this.scriptService = scriptService; this.xContentRegistry = xContentRegistry; diff --git a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java index 444ee72cf8aa5..6efe56fbf2222 100644 --- a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java +++ b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java @@ -12,13 +12,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; -import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Request; -import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; @@ -84,7 +82,7 @@ static List parseHosts(String props) { public static void configureRemoteClusters(List remoteNodes) throws Exception { assertThat(remoteNodes, hasSize(3)); final String remoteClusterSettingPrefix = "cluster.remote." 
+ CLUSTER_ALIAS + "."; - try (RestClient localClient = newLocalClient().getLowLevelClient()) { + try (RestClient localClient = newLocalClient()) { final Settings remoteConnectionSettings; if (randomBoolean()) { final List seeds = remoteNodes.stream() @@ -118,28 +116,32 @@ public static void configureRemoteClusters(List remoteNodes) throws Except } } - static RestHighLevelClient newLocalClient() { + static RestClient newLocalClient() { final List hosts = parseHosts("tests.rest.cluster"); final int index = random().nextInt(hosts.size()); LOGGER.info("Using client node {}", index); - return new RestHighLevelClient(RestClient.builder(hosts.get(index))); + return RestClient.builder(hosts.get(index)).build(); } - static RestHighLevelClient newRemoteClient() { - return new RestHighLevelClient(RestClient.builder(randomFrom(parseHosts("tests.rest.remote_cluster")))); + static RestClient newRemoteClient() { + return RestClient.builder(randomFrom(parseHosts("tests.rest.remote_cluster"))).build(); } - static int indexDocs(RestHighLevelClient client, String index, int numDocs) throws IOException { + static int indexDocs(RestClient client, String index, int numDocs) throws IOException { for (int i = 0; i < numDocs; i++) { - client.index(new IndexRequest(index).id("id_" + i).source("f", i, "@timestamp", i), RequestOptions.DEFAULT); + Request createDoc = new Request("POST", "/" + index + "/_doc/id_" + i); + createDoc.setJsonEntity(Strings.format(""" + { "f": %s, "@timestamp": %s } + """, i, i)); + assertOK(client.performRequest(createDoc)); } - refresh(client.getLowLevelClient(), index); + refresh(client, index); return numDocs; } void verify(String localIndex, int localNumDocs, String remoteIndex, int remoteNumDocs) { - try (RestClient localClient = newLocalClient().getLowLevelClient()) { + try (RestClient localClient = newLocalClient()) { Request request = new Request("POST", "/" + randomFrom(remoteIndex, localIndex + "," + remoteIndex) + "/_eql/search"); int size = between(1, 
100); @@ -161,9 +163,9 @@ void verify(String localIndex, int localNumDocs, String remoteIndex, int remoteN public void testSequences() throws Exception { String localIndex = "test_bwc_search_states_index"; String remoteIndex = "test_bwc_search_states_remote_index"; - try (RestHighLevelClient localClient = newLocalClient(); RestHighLevelClient remoteClient = newRemoteClient()) { + try (RestClient localClient = newLocalClient(); RestClient remoteClient = newRemoteClient()) { createIndex( - localClient.getLowLevelClient(), + localClient, localIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build(), "{\"properties\": {\"@timestamp\": {\"type\": \"date\"}}}", @@ -171,7 +173,7 @@ public void testSequences() throws Exception { ); int localNumDocs = indexDocs(localClient, localIndex, between(10, 100)); createIndex( - remoteClient.getLowLevelClient(), + remoteClient, remoteIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build(), "{\"properties\": {\"@timestamp\": {\"type\": \"date\"}}}", @@ -179,13 +181,13 @@ public void testSequences() throws Exception { ); int remoteNumDocs = indexDocs(remoteClient, remoteIndex, between(10, 100)); - configureRemoteClusters(getNodes(remoteClient.getLowLevelClient())); + configureRemoteClusters(getNodes(remoteClient)); int iterations = between(1, 20); for (int i = 0; i < iterations; i++) { verify(localIndex, localNumDocs, CLUSTER_ALIAS + ":" + remoteIndex, remoteNumDocs); } - deleteIndex(localClient.getLowLevelClient(), localIndex); - deleteIndex(remoteClient.getLowLevelClient(), remoteIndex); + deleteIndex(localClient, localIndex); + deleteIndex(remoteClient, remoteIndex); } } } diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java index e11d1cab8eaa7..90244d9b2c019 100644 --- 
a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java @@ -66,7 +66,7 @@ public void setup() throws Exception { ); if (dataLoaded == false) { - DataLoader.loadDatasetIntoEs(highLevelClient(provisioningClient), this::createParser); + DataLoader.loadDatasetIntoEs(provisioningClient, this::createParser); } } diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java index 588c2d87f743d..1d51af574c810 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java @@ -8,23 +8,20 @@ import org.apache.http.HttpHost; import org.apache.logging.log4j.LogManager; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.common.CheckedBiFunction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContent; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import 
org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.ql.TestUtils; @@ -53,7 +50,6 @@ * * While the loader could be made generic, the queries are bound to each index and generalizing that would make things way too complicated. */ -@SuppressWarnings("removal") public class DataLoader { public static final String TEST_INDEX = "endgame-140"; public static final String TEST_EXTRA_INDEX = "extra"; @@ -79,15 +75,12 @@ private static Map getReplacementPatterns() { public static void main(String[] args) throws IOException { main = true; try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) { - loadDatasetIntoEs(new RestHighLevelClient(client, ignore -> {}, List.of()) { - }, DataLoader::createParser); + loadDatasetIntoEs(client, DataLoader::createParser); } } - public static void loadDatasetIntoEs( - RestHighLevelClient client, - CheckedBiFunction p - ) throws IOException { + public static void loadDatasetIntoEs(RestClient client, CheckedBiFunction p) + throws IOException { // // Main Index @@ -113,7 +106,7 @@ public static void loadDatasetIntoEs( } private static void load( - RestHighLevelClient client, + RestClient client, String indexNames, String dataName, Consumer> datasetTransform, @@ -136,14 +129,8 @@ private static void load( } } - private static void createTestIndex(RestHighLevelClient client, String indexName, String mapping) throws IOException { - ESRestTestCase.createIndex( - client.getLowLevelClient(), - indexName, - Settings.builder().put("number_of_shards", 1).build(), - mapping, - null - ); + private static void createTestIndex(RestClient client, String indexName, String mapping) throws IOException { + ESRestTestCase.createIndex(client, indexName, Settings.builder().put("number_of_shards", 1).build(), mapping, null); } /** @@ -171,30 +158,40 @@ private static CharSequence randomOf(String... 
values) { @SuppressWarnings("unchecked") private static void loadData( - RestHighLevelClient client, + RestClient client, String indexName, Consumer> datasetTransform, URL resource, CheckedBiFunction p ) throws IOException { - BulkRequest bulk = new BulkRequest(); - bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + StringBuilder bulkRequestBody = new StringBuilder(); + String actionMetadata = Strings.format("{ \"index\" : { \"_index\" : \"%s\" } }%n", indexName); + int bulkDocuments; try (XContentParser parser = p.apply(JsonXContent.jsonXContent, TestUtils.inputStream(resource))) { List list = parser.list(); + bulkDocuments = list.size(); for (Object item : list) { assertThat(item, instanceOf(Map.class)); Map entry = (Map) item; if (datasetTransform != null) { datasetTransform.accept(entry); } - bulk.add(new IndexRequest(indexName).source(entry, XContentType.JSON)); + bulkRequestBody.append(actionMetadata); + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.map(entry); + bulkRequestBody.append(Strings.toString(builder)); + } + bulkRequestBody.append("\n"); } } - if (bulk.numberOfActions() > 0) { - BulkResponse bulkResponse = client.bulk(bulk, RequestOptions.DEFAULT); - if (bulkResponse.hasFailures()) { + if (bulkDocuments > 0) { + Request request = new Request("POST", "_bulk?refresh=true"); + request.setJsonEntity(bulkRequestBody.toString()); + ObjectPath response = ObjectPath.createFromResponse(client.performRequest(request)); + boolean errors = response.evaluate("errors"); + if (errors) { LogManager.getLogger(DataLoader.class).info("Data loading FAILED"); } else { LogManager.getLogger(DataLoader.class).info("Data loading OK"); diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlRestTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlRestTestCase.java index 65ed174f55625..49d25af3bfe53 100644 --- 
a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlRestTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlRestTestCase.java @@ -77,7 +77,7 @@ public void testBadRequests() throws Exception { assertBadRequest(""" {"query": "sample by event.category [any where true] [any where true]", "fetch_size": 1001} - """, "Fetch size cannot be greater than [1000]", 500); + """, "Fetch size cannot be greater than [1000]", 400); deleteIndexWithProvisioningClient(defaultValidationIndexName); } diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/RemoteClusterAwareEqlRestTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/RemoteClusterAwareEqlRestTestCase.java index 739f3fc83cd1e..88858831b37d1 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/RemoteClusterAwareEqlRestTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/RemoteClusterAwareEqlRestTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -21,14 +20,13 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.Before; import java.io.IOException; -import java.util.Collections; +import java.util.List; import static org.elasticsearch.common.Strings.hasText; -@SuppressWarnings("removal") public abstract class RemoteClusterAwareEqlRestTestCase extends ESRestTestCase { private static final long CLIENT_TIMEOUT = 40L; // upped from 10s to accomodate for max measured throughput decline @@ -36,19 +34,14 @@ public 
abstract class RemoteClusterAwareEqlRestTestCase extends ESRestTestCase { // client used for loading data on a remote cluster only. private static RestClient remoteClient; - @BeforeClass - public static void initRemoteClients() throws IOException { - String crossClusterHost = System.getProperty("tests.rest.cluster.remote.host"); // gradle defined - if (crossClusterHost != null) { - int portSeparator = crossClusterHost.lastIndexOf(':'); - if (portSeparator < 0) { - throw new IllegalArgumentException("Illegal cluster url [" + crossClusterHost + "]"); + @Before + public void initRemoteClients() throws IOException { + if (remoteClient == null) { + String crossClusterHost = getRemoteCluster(); + if (crossClusterHost != null) { + List httpHosts = parseClusterHosts(crossClusterHost); + remoteClient = clientBuilder(secureRemoteClientSettings(), httpHosts.toArray(new HttpHost[0])); } - String host = crossClusterHost.substring(0, portSeparator); - int port = Integer.parseInt(crossClusterHost.substring(portSeparator + 1)); - HttpHost[] remoteHttpHosts = new HttpHost[] { new HttpHost(host, port) }; - - remoteClient = clientBuilder(secureRemoteClientSettings(), remoteHttpHosts); } } @@ -61,9 +54,8 @@ public static void closeRemoteClients() throws IOException { } } - protected static RestHighLevelClient highLevelClient(RestClient client) { - return new RestHighLevelClient(client, ignore -> {}, Collections.emptyList()) { - }; + protected String getRemoteCluster() { + return System.getProperty("tests.rest.cluster.remote.host"); } protected static RestClient clientBuilder(Settings settings, HttpHost[] hosts) throws IOException { @@ -123,8 +115,8 @@ protected Settings restClientSettings() { } protected static Settings secureRemoteClientSettings() { - String user = System.getProperty("tests.rest.cluster.remote.user"); // gradle defined - String pass = System.getProperty("tests.rest.cluster.remote.password"); + String user = System.getProperty("tests.rest.cluster.remote.user", 
"test_user"); // gradle defined + String pass = System.getProperty("tests.rest.cluster.remote.password", "x-pack-test-password"); if (hasText(user) && hasText(pass)) { String token = basicAuthHeaderValue(user, new SecureString(pass.toCharArray())); return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/stats/EqlUsageRestTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/stats/EqlUsageRestTestCase.java index 2626fcfda1ef1..38820056db81f 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/stats/EqlUsageRestTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/stats/EqlUsageRestTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.test.eql.stats; import org.elasticsearch.client.Request; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -20,7 +19,6 @@ import java.io.IOException; import java.io.InputStream; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -29,10 +27,8 @@ /** * Tests a random number of queries that increase various (most of the times, one query will "touch" multiple metrics values) metrics. 
*/ -@SuppressWarnings("removal") public abstract class EqlUsageRestTestCase extends ESRestTestCase { - private RestHighLevelClient highLevelClient; private Map baseMetrics = new HashMap<>(); private Integer baseAllTotalQueries = 0; private Integer baseAllFailedQueries = 0; @@ -117,7 +113,7 @@ public void testEqlRestUsage() throws IOException { // it doesn't matter if the index is already there (probably created by another test); _if_ its mapping is the expected one // it should be enough if (client().performRequest(new Request("HEAD", "/" + DataLoader.TEST_INDEX)).getStatusLine().getStatusCode() == 404) { - DataLoader.loadDatasetIntoEs(highLevelClient(), this::createParser); + DataLoader.loadDatasetIntoEs(client(), this::createParser); } String defaultPipe = "pipe_tail"; @@ -382,14 +378,6 @@ private void assertFeaturesMetricsExcept(Map responseAsMap, Set< } } - private RestHighLevelClient highLevelClient() { - if (highLevelClient == null) { - highLevelClient = new RestHighLevelClient(client(), ignore -> {}, Collections.emptyList()) { - }; - } - return highLevelClient; - } - @Override protected Settings restClientSettings() { String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); diff --git a/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java b/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java index 6f9c8c9373359..1d5ed1ffdcbab 100644 --- a/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java +++ b/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java @@ -19,7 +19,6 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -38,7 +37,6 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Properties; @@ -47,14 +45,12 @@ @TimeoutSuite(millis = 30 * TimeUnits.MINUTE) @TestLogging(value = "org.elasticsearch.xpack.eql.EsEQLCorrectnessIT:INFO", reason = "Log query execution time") -@SuppressWarnings("removal") public class EsEQLCorrectnessIT extends ESRestTestCase { private static final String PARAM_FORMATTING = "%1$s"; private static final String QUERIES_FILENAME = "queries.toml"; private static Properties CFG; - private static RestHighLevelClient highLevelClient; private static RequestOptions COMMON_REQUEST_OPTIONS; private static long totalTime = 0; @@ -117,14 +113,6 @@ public EsEQLCorrectnessIT(EqlSpec spec) { this.spec = spec; } - private RestHighLevelClient highLevelClient() { - if (highLevelClient == null) { - highLevelClient = new RestHighLevelClient(client(), ignore -> {}, Collections.emptyList()) { - }; - } - return highLevelClient; - } - @ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING) public static Iterable parameters() throws Exception { Collection specs; diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/build.gradle b/x-pack/plugin/eql/qa/multi-cluster-with-security/build.gradle index 47a405517a309..ddafb5f0a9941 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/build.gradle +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/build.gradle @@ -1,52 +1,10 @@ -import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask -import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE - -apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { javaRestTestImplementation project(path: xpackModule('eql:qa:common')) } -def 
remoteClusterReg = testClusters.register('remote-cluster') { - testDistribution = 'DEFAULT' - numberOfNodes = 2 - setting 'node.roles', '[data,ingest,master]' - setting 'xpack.ml.enabled', 'false' - setting 'xpack.watcher.enabled', 'false' - setting 'xpack.security.enabled', 'true' - setting 'xpack.security.autoconfiguration.enabled', 'false' - - user username: "test_user", password: "x-pack-test-password" -} - -def integTestClusterReg = testClusters.register('javaRestTest') { - testDistribution = 'DEFAULT' - setting 'xpack.ml.enabled', 'false' - setting 'xpack.watcher.enabled', 'false' - setting 'cluster.remote.my_remote_cluster.seeds', { - remoteClusterReg.get().getAllTransportPortURI().collect { "\"$it\"" }.toString() - }, IGNORE_VALUE - setting 'cluster.remote.connections_per_cluster', "1" - setting 'xpack.security.enabled', 'true' - setting 'xpack.security.autoconfiguration.enabled', 'false' - - user username: "test_user", password: "x-pack-test-password" -} - -tasks.register("startRemoteCluster", DefaultTestClustersTask.class) { - useCluster remoteClusterReg - doLast { - "Starting remote cluster before integ tests and integTest cluster is started" - } -} - -tasks.named("javaRestTest").configure { - dependsOn 'startRemoteCluster' - useCluster remoteClusterReg - doFirst { - nonInputProperties.systemProperty 'tests.rest.cluster.remote.host', remoteClusterReg.map(c->c.getAllHttpSocketURI().get(0)) - nonInputProperties.systemProperty 'tests.rest.cluster.remote.user', "test_user" - nonInputProperties.systemProperty 'tests.rest.cluster.remote.password', "x-pack-test-password" - } +tasks.named('javaRestTest') { + usesDefaultDistribution() + maxParallelForks = 1 } -tasks.named("check").configure {dependsOn("javaRestTest") } // run these tests as part of the "check" task diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java 
b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java index ad8e94e50e8f8..c20968871472f 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java @@ -7,14 +7,35 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.eql.EqlDateNanosSpecTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import java.util.List; import static org.elasticsearch.test.eql.DataLoader.TEST_NANOS_INDEX; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.LOCAL_CLUSTER; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.REMOTE_CLUSTER; import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.remoteClusterIndex; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlDateNanosIT extends EqlDateNanosSpecTestCase { + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(REMOTE_CLUSTER).around(LOCAL_CLUSTER); + + @Override + protected String getTestRestCluster() { + return LOCAL_CLUSTER.getHttpAddresses(); + } + + @Override + protected String getRemoteCluster() { + return REMOTE_CLUSTER.getHttpAddresses(); + } public EqlDateNanosIT(String query, String name, List eventIds, String[] joinKeys, Integer size, Integer maxSamplesPerKey) { super(remoteClusterIndex(TEST_NANOS_INDEX), query, name, eventIds, joinKeys, size, maxSamplesPerKey); diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlExtraIT.java 
b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlExtraIT.java index 7e79b55deb415..774c19d02adf0 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlExtraIT.java +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlExtraIT.java @@ -7,14 +7,35 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.eql.EqlExtraSpecTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import java.util.List; import static org.elasticsearch.test.eql.DataLoader.TEST_EXTRA_INDEX; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.LOCAL_CLUSTER; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.REMOTE_CLUSTER; import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.remoteClusterIndex; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlExtraIT extends EqlExtraSpecTestCase { + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(REMOTE_CLUSTER).around(LOCAL_CLUSTER); + + @Override + protected String getTestRestCluster() { + return LOCAL_CLUSTER.getHttpAddresses(); + } + + @Override + protected String getRemoteCluster() { + return REMOTE_CLUSTER.getHttpAddresses(); + } public EqlExtraIT(String query, String name, List eventIds, String[] joinKeys, Integer size, Integer maxSamplesPerKey) { super(remoteClusterIndex(TEST_EXTRA_INDEX), query, name, eventIds, joinKeys, size, maxSamplesPerKey); diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java index 
2655f2c393ab6..fc0b5c8b34829 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java @@ -7,11 +7,32 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.eql.EqlRestTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.LOCAL_CLUSTER; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.REMOTE_CLUSTER; import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.remoteClusterPattern; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlRestIT extends EqlRestTestCase { + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(REMOTE_CLUSTER).around(LOCAL_CLUSTER); + + @Override + protected String getTestRestCluster() { + return LOCAL_CLUSTER.getHttpAddresses(); + } + + @Override + protected String getRemoteCluster() { + return REMOTE_CLUSTER.getHttpAddresses(); + } @Override protected String indexPattern(String pattern) { diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestValidationIT.java b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestValidationIT.java index a9f1d22ce0969..014df238bd363 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestValidationIT.java +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestValidationIT.java @@ -7,13 +7,34 @@ package org.elasticsearch.xpack.eql; +import 
com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.eql.EqlRestValidationTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import java.io.IOException; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.LOCAL_CLUSTER; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.REMOTE_CLUSTER; import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.remoteClusterPattern; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlRestValidationIT extends EqlRestValidationTestCase { + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(REMOTE_CLUSTER).around(LOCAL_CLUSTER); + + @Override + protected String getTestRestCluster() { + return LOCAL_CLUSTER.getHttpAddresses(); + } + + @Override + protected String getRemoteCluster() { + return REMOTE_CLUSTER.getHttpAddresses(); + } @Override protected String getInexistentIndexErrorMessage() { diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleIT.java b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleIT.java index 0d36c6938666b..1502c250bd058 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleIT.java +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleIT.java @@ -7,14 +7,35 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.eql.EqlSampleTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import java.util.List; import static 
org.elasticsearch.test.eql.DataLoader.TEST_SAMPLE; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.LOCAL_CLUSTER; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.REMOTE_CLUSTER; import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.remoteClusterPattern; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlSampleIT extends EqlSampleTestCase { + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(REMOTE_CLUSTER).around(LOCAL_CLUSTER); + + @Override + protected String getTestRestCluster() { + return LOCAL_CLUSTER.getHttpAddresses(); + } + + @Override + protected String getRemoteCluster() { + return REMOTE_CLUSTER.getHttpAddresses(); + } public EqlSampleIT(String query, String name, List eventIds, String[] joinKeys, Integer size, Integer maxSamplesPerKey) { super(remoteClusterPattern(TEST_SAMPLE), query, name, eventIds, joinKeys, size, maxSamplesPerKey); diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleMultipleEntriesIT.java b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleMultipleEntriesIT.java index 15bac1be1706b..795fe4e103a31 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleMultipleEntriesIT.java +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleMultipleEntriesIT.java @@ -7,14 +7,35 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.eql.EqlSampleMultipleEntriesTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import java.util.List; import static org.elasticsearch.test.eql.DataLoader.TEST_SAMPLE_MULTI; 
+import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.LOCAL_CLUSTER; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.REMOTE_CLUSTER; import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.remoteClusterPattern; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlSampleMultipleEntriesIT extends EqlSampleMultipleEntriesTestCase { + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(REMOTE_CLUSTER).around(LOCAL_CLUSTER); + + @Override + protected String getTestRestCluster() { + return LOCAL_CLUSTER.getHttpAddresses(); + } + + @Override + protected String getRemoteCluster() { + return REMOTE_CLUSTER.getHttpAddresses(); + } public EqlSampleMultipleEntriesIT( String query, diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSpecIT.java b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSpecIT.java index ffa83ae362f50..2cddecb644a1a 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSpecIT.java +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSpecIT.java @@ -7,14 +7,35 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.eql.EqlSpecTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import java.util.List; import static org.elasticsearch.test.eql.DataLoader.TEST_INDEX; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.LOCAL_CLUSTER; +import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.REMOTE_CLUSTER; import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.remoteClusterIndex; +@ThreadLeakFilters(filters = 
TestClustersThreadFilter.class) public class EqlSpecIT extends EqlSpecTestCase { + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(REMOTE_CLUSTER).around(LOCAL_CLUSTER); + + @Override + protected String getTestRestCluster() { + return LOCAL_CLUSTER.getHttpAddresses(); + } + + @Override + protected String getRemoteCluster() { + return REMOTE_CLUSTER.getHttpAddresses(); + } public EqlSpecIT(String query, String name, List eventIds, String[] joinKeys, Integer size, Integer maxSamplesPerKey) { super(remoteClusterIndex(TEST_INDEX), query, name, eventIds, joinKeys, size, maxSamplesPerKey); diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/RemoteClusterTestUtils.java b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/RemoteClusterTestUtils.java index 004b0d0683732..1386ea466db27 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/RemoteClusterTestUtils.java +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/javaRestTest/java/org/elasticsearch/xpack/eql/RemoteClusterTestUtils.java @@ -7,10 +7,38 @@ package org.elasticsearch.xpack.eql; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; + import java.util.StringJoiner; public class RemoteClusterTestUtils { - public static final String REMOTE_CLUSTER_NAME = "my_remote_cluster"; // gradle defined + public static final String REMOTE_CLUSTER_NAME = "my_remote_cluster"; + + public static final ElasticsearchCluster REMOTE_CLUSTER = ElasticsearchCluster.local() + .name(REMOTE_CLUSTER_NAME) + .distribution(DistributionType.DEFAULT) + .nodes(2) + .setting("node.roles", "[data,ingest,master]") + .setting("xpack.ml.enabled", "false") + .setting("xpack.watcher.enabled", "false") + .setting("xpack.security.enabled", "true") + 
.setting("xpack.security.autoconfiguration.enabled", "false") + .user("test_user", "x-pack-test-password") + .shared(true) + .build(); + + public static final ElasticsearchCluster LOCAL_CLUSTER = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.ml.enabled", "false") + .setting("xpack.watcher.enabled", "false") + .setting("cluster.remote.my_remote_cluster.seeds", () -> "\"" + REMOTE_CLUSTER.getTransportEndpoint(0) + "\"") + .setting("cluster.remote.connections_per_cluster", "1") + .setting("xpack.security.enabled", "true") + .setting("xpack.security.autoconfiguration.enabled", "false") + .user("test_user", "x-pack-test-password") + .shared(true) + .build(); public static String remoteClusterIndex(String indexName) { return REMOTE_CLUSTER_NAME + ":" + indexName; diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle index cdc3279a8a696..5f1911dd579bf 100644 --- a/x-pack/plugin/eql/qa/rest/build.gradle +++ b/x-pack/plugin/eql/qa/rest/build.gradle @@ -21,6 +21,7 @@ artifacts { tasks.named('javaRestTest') { usesDefaultDistribution() + maxParallelForks = 1 } tasks.named('yamlRestTest') { usesDefaultDistribution() diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java index 2b9e3839dfaac..1df10fde7fde5 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java @@ -7,16 +7,20 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.EqlDateNanosSpecTestCase; import org.junit.ClassRule; import 
java.util.List; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlDateNanosIT extends EqlDateNanosSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = EqlTestCluster.getCluster(); + public static final ElasticsearchCluster cluster = EqlTestCluster.CLUSTER; @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlExtraIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlExtraIT.java index ebaf30c365f22..8af8fcac087b5 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlExtraIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlExtraIT.java @@ -7,16 +7,20 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.EqlExtraSpecTestCase; import org.junit.ClassRule; import java.util.List; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlExtraIT extends EqlExtraSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = EqlTestCluster.getCluster(); + public static final ElasticsearchCluster cluster = EqlTestCluster.CLUSTER; @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlMissingEventsIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlMissingEventsIT.java index 416d1da8deafe..05557fb4883b3 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlMissingEventsIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlMissingEventsIT.java @@ -7,16 +7,20 @@ package 
org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.EqlMissingEventsSpecTestCase; import org.junit.ClassRule; import java.util.List; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlMissingEventsIT extends EqlMissingEventsSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = EqlTestCluster.getCluster(); + public static final ElasticsearchCluster cluster = EqlTestCluster.CLUSTER; @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java index 44312a8cbd25e..a29086f91e421 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java @@ -7,14 +7,18 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.EqlRestTestCase; import org.junit.ClassRule; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlRestIT extends EqlRestTestCase { @ClassRule - public static final ElasticsearchCluster cluster = EqlTestCluster.getCluster(); + public static final ElasticsearchCluster cluster = EqlTestCluster.CLUSTER; @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestValidationIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestValidationIT.java 
index 46fd25d2f163c..da984eac56841 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestValidationIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestValidationIT.java @@ -7,16 +7,20 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.EqlRestValidationTestCase; import org.junit.ClassRule; import java.io.IOException; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlRestValidationIT extends EqlRestValidationTestCase { @ClassRule - public static final ElasticsearchCluster cluster = EqlTestCluster.getCluster(); + public static final ElasticsearchCluster cluster = EqlTestCluster.CLUSTER; @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleIT.java index acc44688edd86..dc2c653fad89e 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleIT.java @@ -7,16 +7,20 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.EqlSampleTestCase; import org.junit.ClassRule; import java.util.List; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlSampleIT extends EqlSampleTestCase { @ClassRule - public static final ElasticsearchCluster cluster = EqlTestCluster.getCluster(); + public static final 
ElasticsearchCluster cluster = EqlTestCluster.CLUSTER; @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleMultipleEntriesIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleMultipleEntriesIT.java index 6eccece6954f6..af1ade9120bbd 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleMultipleEntriesIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSampleMultipleEntriesIT.java @@ -7,16 +7,20 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.EqlSampleMultipleEntriesTestCase; import org.junit.ClassRule; import java.util.List; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlSampleMultipleEntriesIT extends EqlSampleMultipleEntriesTestCase { @ClassRule - public static final ElasticsearchCluster cluster = EqlTestCluster.getCluster(); + public static final ElasticsearchCluster cluster = EqlTestCluster.CLUSTER; @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSpecIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSpecIT.java index c2313595d3dbc..7aac0ae336c8a 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSpecIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSpecIT.java @@ -7,16 +7,20 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import 
org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.EqlSpecTestCase; import org.junit.ClassRule; import java.util.List; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlSpecIT extends EqlSpecTestCase { @ClassRule - public static final ElasticsearchCluster cluster = EqlTestCluster.getCluster(); + public static final ElasticsearchCluster cluster = EqlTestCluster.CLUSTER; @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlStatsIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlStatsIT.java index efa3866c8df90..828fad133c073 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlStatsIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlStatsIT.java @@ -7,14 +7,18 @@ package org.elasticsearch.xpack.eql; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.stats.EqlUsageRestTestCase; import org.junit.ClassRule; +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class EqlStatsIT extends EqlUsageRestTestCase { @ClassRule - public static final ElasticsearchCluster cluster = EqlTestCluster.getCluster(); + public static final ElasticsearchCluster cluster = EqlTestCluster.CLUSTER; @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlTestCluster.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlTestCluster.java index 66ea80dd1c65f..a3831b23c2ef2 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlTestCluster.java +++ 
b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlTestCluster.java @@ -10,15 +10,14 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; -public class EqlTestCluster { +public abstract class EqlTestCluster { - public static ElasticsearchCluster getCluster() { - return ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.license.self_generated.type", "basic") - .setting("xpack.monitoring.collection.enabled", "true") - .setting("xpack.security.enabled", "false") - .build(); - } + public static final ElasticsearchCluster CLUSTER = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "basic") + .setting("xpack.monitoring.collection.enabled", "true") + .setting("xpack.security.enabled", "false") + .shared(true) + .build(); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/EqlClientException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/EqlClientException.java index 75d3dd45f110c..0d1b363eeb984 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/EqlClientException.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/EqlClientException.java @@ -8,16 +8,12 @@ import org.elasticsearch.xpack.ql.QlClientException; -public abstract class EqlClientException extends QlClientException { +public class EqlClientException extends QlClientException { - protected EqlClientException(String message, Object... args) { + public EqlClientException(String message, Object... 
args) { super(message, args); } - protected EqlClientException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } - protected EqlClientException(String message, Throwable cause) { super(message, cause); } @@ -26,7 +22,4 @@ protected EqlClientException(Throwable cause, String message, Object... args) { super(cause, message, args); } - protected EqlClientException(Throwable cause) { - super(cause); - } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java index 7c21b68d7fe9e..bed7e1634ac7c 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.eql.analysis; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.ql.common.Failure; @@ -14,12 +13,8 @@ public class VerificationException extends EqlClientException { - protected VerificationException(Collection sources) { + public VerificationException(Collection sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/ExecutionManager.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/ExecutionManager.java index dea45e4b9d766..b26c815c1a2b5 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/ExecutionManager.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/ExecutionManager.java @@ -28,6 +28,7 @@ import 
org.elasticsearch.xpack.eql.querydsl.container.FieldExtractorRegistry; import org.elasticsearch.xpack.eql.session.EqlConfiguration; import org.elasticsearch.xpack.eql.session.EqlSession; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor; import org.elasticsearch.xpack.ql.execution.search.extractor.BucketExtractor; import org.elasticsearch.xpack.ql.execution.search.extractor.ComputingExtractor; @@ -177,7 +178,7 @@ public Executable assemble( */ public Executable assemble(List> listOfKeys, List plans, Limit limit) { if (cfg.fetchSize() > SAMPLE_MAX_PAGE_SIZE) { - throw new EqlIllegalArgumentException("Fetch size cannot be greater than [{}]", SAMPLE_MAX_PAGE_SIZE); + throw new InvalidArgumentException("Fetch size cannot be greater than [{}]", SAMPLE_MAX_PAGE_SIZE); } FieldExtractorRegistry extractorRegistry = new FieldExtractorRegistry(); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SampleIterator.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SampleIterator.java index b8dde5574530a..a96102dad6cfb 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SampleIterator.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SampleIterator.java @@ -47,7 +47,7 @@ public class SampleIterator implements Executable { - private final Logger log = LogManager.getLogger(SampleIterator.class); + private static final Logger log = LogManager.getLogger(SampleIterator.class); private final QueryClient client; private final List criteria; diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java index aff398a523e93..8640378878f10 100644 --- 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java @@ -21,6 +21,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; import org.elasticsearch.xpack.eql.execution.search.extractor.CompositeKeyExtractor; import org.elasticsearch.xpack.eql.execution.search.extractor.FieldHitExtractor; @@ -156,7 +157,7 @@ public static HitExtractor createExtractor(FieldExtraction ref, EqlConfiguration hitNames.add(he.hitName()); if (hitNames.size() > 1) { - throw new EqlIllegalArgumentException("Multi-level nested fields [{}] not supported yet", hitNames); + throw new EqlClientException("Multi-level nested fields [{}] not supported yet", hitNames); } return new HitExtractorInput(l.source(), l.expression(), he); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java index c53c567b2a2f9..adb8ee1b43c02 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java @@ -32,7 +32,7 @@ public class SequenceMatcher { private static final String CB_INFLIGHT_LABEL = "sequence_inflight"; private static final String CB_COMPLETED_LABEL = "sequence_completed"; - private final Logger log = LogManager.getLogger(SequenceMatcher.class); + private static final Logger log = LogManager.getLogger(SequenceMatcher.class); static class Stats { diff --git 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java index bbee76e976f92..d692bc376de01 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java @@ -77,7 +77,7 @@ public class TumblingWindow implements Executable { */ private static final int MISSING_EVENTS_SEQUENCES_CHECK_BATCH_SIZE = 1000; - private final Logger log = LogManager.getLogger(TumblingWindow.class); + private static final Logger log = LogManager.getLogger(TumblingWindow.class); /** * Simple cache for removing duplicate strings (such as index name or common keys). diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java index 3ac70ccc4b9c1..c0993bbb9c7c6 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java @@ -40,9 +40,8 @@ import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; -public class EqlFunctionRegistry extends FunctionRegistry { +public final class EqlFunctionRegistry extends FunctionRegistry { - @SuppressWarnings("this-escape") public EqlFunctionRegistry() { register(functions()); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumber.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumber.java index 6bd7d76737849..851f7786c529f 100644 --- 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumber.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumber.java @@ -36,11 +36,10 @@ /** * EQL specific function for parsing strings into numbers. */ -public class ToNumber extends ScalarFunction implements OptionalArgument { +public final class ToNumber extends ScalarFunction implements OptionalArgument { private final Expression value, base; - @SuppressWarnings("this-escape") public ToNumber(Source source, Expression value, Expression base) { super(source, Arrays.asList(value, base != null ? base : new Literal(source, null, DataTypes.NULL))); this.value = value; diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessor.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessor.java index 214600c69b6cf..2e64a54eac16b 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessor.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessor.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import java.io.IOException; @@ -79,7 +80,7 @@ public static Object doProcess(Object value, Object base) { return Long.parseLong(value.toString(), radix); } } catch (NumberFormatException e) { - throw new EqlIllegalArgumentException("Unable to convert [{}] to number of base [{}]", value, radix); + throw new InvalidArgumentException(e, "Unable to convert [{}] to number of base [{}]", 
value, radix); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Between.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Between.java index c0eb7e42e7e56..a6854dc76c0c7 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Between.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Between.java @@ -40,11 +40,10 @@ * between(source, left, right[, greedy=false]) * Extracts a substring from source that’s between left and right substrings */ -public class Between extends CaseInsensitiveScalarFunction implements OptionalArgument { +public final class Between extends CaseInsensitiveScalarFunction implements OptionalArgument { private final Expression input, left, right, greedy; - @SuppressWarnings("this-escape") public Between(Source source, Expression input, Expression left, Expression right, Expression greedy, boolean caseInsensitive) { super(source, Arrays.asList(input, left, right, defaultGreedy(greedy)), caseInsensitive); this.input = input; @@ -135,7 +134,7 @@ public ScriptTemplate asScript() { return asScriptFrom(inputScript, leftScript, rightScript, greedyScript); } - protected ScriptTemplate asScriptFrom( + private ScriptTemplate asScriptFrom( ScriptTemplate inputScript, ScriptTemplate leftScript, ScriptTemplate rightScript, diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessor.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessor.java index 143513f59cdd2..d802fd60b8609 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessor.java +++ 
b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessor.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.CIDRUtils; -import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.ql.util.Check; @@ -72,7 +72,7 @@ public static Object doProcess(Object source, List addresses) { try { return CIDRUtils.isInRange((String) source, arr); } catch (IllegalArgumentException e) { - throw new EqlIllegalArgumentException(e.getMessage()); + throw new InvalidArgumentException(e.getMessage()); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/IndexOf.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/IndexOf.java index fe8d60cc46986..9dabb523f93a7 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/IndexOf.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/IndexOf.java @@ -36,11 +36,10 @@ * Find the first position (zero-indexed) of a string where a substring is found. * If the optional parameter start is provided, then this will find the first occurrence at or after the start position. 
*/ -public class IndexOf extends CaseInsensitiveScalarFunction implements OptionalArgument { +public final class IndexOf extends CaseInsensitiveScalarFunction implements OptionalArgument { private final Expression input, substring, start; - @SuppressWarnings("this-escape") public IndexOf(Source source, Expression input, Expression substring, Expression start, boolean caseInsensitive) { super(source, asList(input, substring, start != null ? start : new Literal(source, null, DataTypes.NULL)), caseInsensitive); this.input = input; @@ -103,7 +102,7 @@ public ScriptTemplate asScript() { return asScriptFrom(inputScript, substringScript, startScript); } - protected ScriptTemplate asScriptFrom(ScriptTemplate inputScript, ScriptTemplate substringScript, ScriptTemplate startScript) { + private ScriptTemplate asScriptFrom(ScriptTemplate inputScript, ScriptTemplate substringScript, ScriptTemplate startScript) { return new ScriptTemplate( format( Locale.ROOT, diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Match.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Match.java index 4137d8d41e3fe..f79785636ca59 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Match.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Match.java @@ -32,18 +32,16 @@ * Returns true if the source field matches any of the provided regular expressions * Refer to: https://eql.readthedocs.io/en/latest/query-guide/functions.html#match */ -public class Match extends BaseSurrogateFunction { +public final class Match extends BaseSurrogateFunction { private final Expression field; private final List patterns; private final boolean caseInsensitive; - @SuppressWarnings("this-escape") public Match(Source source, Expression field, List patterns, boolean caseInsensitive) { this(source, 
CollectionUtils.combine(singletonList(field), patterns), caseInsensitive); } - @SuppressWarnings("this-escape") private Match(Source source, List children, boolean caseInsensitive) { super(source, children); this.field = children().get(0); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Substring.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Substring.java index 524ccf0422ccb..632ed8e72e4bf 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Substring.java @@ -38,11 +38,10 @@ * EQL specific substring function - similar to the one in Python. * Note this is different than the one in SQL. */ -public class Substring extends ScalarFunction implements OptionalArgument { +public final class Substring extends ScalarFunction implements OptionalArgument { private final Expression input, start, end; - @SuppressWarnings("this-escape") public Substring(Source source, Expression input, Expression start, Expression end) { super(source, Arrays.asList(input, start, end != null ? 
end : new Literal(source, null, DataTypes.NULL))); this.input = input; @@ -98,7 +97,7 @@ public ScriptTemplate asScript() { return asScriptFrom(inputScript, startScript, endScript); } - protected ScriptTemplate asScriptFrom(ScriptTemplate inputScript, ScriptTemplate startScript, ScriptTemplate endScript) { + private ScriptTemplate asScriptFrom(ScriptTemplate inputScript, ScriptTemplate startScript, ScriptTemplate endScript) { return new ScriptTemplate( format( Locale.ROOT, diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ExpressionBuilder.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ExpressionBuilder.java index 381d7e2ecfd38..5b8bf71165266 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ExpressionBuilder.java @@ -23,7 +23,7 @@ import org.elasticsearch.xpack.eql.parser.EqlBaseParser.LogicalBinaryContext; import org.elasticsearch.xpack.eql.parser.EqlBaseParser.LogicalNotContext; import org.elasticsearch.xpack.eql.parser.EqlBaseParser.PredicateContext; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; @@ -205,8 +205,8 @@ public Literal visitDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx) { try { return new Literal(source, Double.valueOf(StringUtils.parseDouble(text)), DataTypes.DOUBLE); - } catch (QlIllegalArgumentException siae) { - throw new ParsingException(source, siae.getMessage()); + } catch (InvalidArgumentException ciae) { + throw new ParsingException(source, ciae.getMessage()); } } @@ -242,13 +242,13 @@ public Literal visitIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx) { try { Number value = 
StringUtils.parseIntegral(text); return new Literal(source, value, DataTypes.fromJava(value)); - } catch (QlIllegalArgumentException siae) { + } catch (InvalidArgumentException ciae) { // if it's too large, then quietly try to parse as a float instead try { return new Literal(source, Double.valueOf(StringUtils.parseDouble(text)), DataTypes.DOUBLE); - } catch (QlIllegalArgumentException ignored) {} + } catch (InvalidArgumentException ignored) {} - throw new ParsingException(source, siae.getMessage()); + throw new ParsingException(source, ciae.getMessage()); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java index 8e96ea41c0a84..2f57bc021a1c0 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java @@ -339,7 +339,7 @@ public Sequence visitSequence(SequenceContext ctx) { } if (queries.stream().allMatch(KeyedFilter::isMissingEventFilter)) { - throw new IllegalStateException("A sequence requires at least one positive event query; found none"); + throw new ParsingException(source, "A sequence requires at least one positive event query; found none"); } return new Sequence(source, queries, until, maxSpan, fieldTimestamp(), fieldTiebreaker(), resultPosition()); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ParsingException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ParsingException.java index 820b7451ac7a0..1c5e85f58795a 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ParsingException.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ParsingException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.eql.parser; -import org.elasticsearch.rest.RestStatus; 
import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.ql.tree.Source; @@ -50,11 +49,6 @@ public String getErrorMessage() { return super.getMessage(); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - @Override public String getMessage() { return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java index 25ac30caf0c2e..87a6119d38acf 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.eql.planner; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.ql.common.Failure; @@ -22,8 +21,4 @@ protected PlanningException(Collection sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java index 5f71d316333ea..f573ea805b0b9 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java @@ -61,7 +61,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; import static org.elasticsearch.xpack.ql.plugin.TransportActionUtils.executeRequestWithRetryAttempt; -public class TransportEqlSearchAction extends HandledTransportAction +public final class TransportEqlSearchAction 
extends HandledTransportAction implements AsyncTaskManagementService.AsyncOperation { @@ -73,7 +73,6 @@ public class TransportEqlSearchAction extends HandledTransportAction asyncTaskManagementService; - @SuppressWarnings("this-escape") @Inject public TransportEqlSearchAction( Settings settings, diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/MathUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/MathUtils.java index 4c0d37026fe04..7e35a0ba1eb48 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/MathUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/MathUtils.java @@ -14,6 +14,11 @@ public class MathUtils { public static int abs(int number) { if (number == Integer.MIN_VALUE) { + // TODO: can this function be removed? + // This case should never occur, as `number` is either a non-negative user-provided input, + // or the result of opposing sign integers summation. + // Additionally, the math on offset/limit is inexact anyways. + // But, if this can somehow happen, we should (1) have a test and (2) switch to exact math everywhere. 
throw new EqlIllegalArgumentException("[" + number + "] cannot be negated since the result is outside the range"); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/StringUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/StringUtils.java index 0f27a4989147a..81983adbbcbe9 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/StringUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/StringUtils.java @@ -7,10 +7,7 @@ package org.elasticsearch.xpack.eql.util; -import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; -import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.predicate.regex.LikePattern; -import org.elasticsearch.xpack.ql.type.DataTypes; public final class StringUtils { @@ -29,11 +26,4 @@ public static LikePattern toLikePattern(String s) { return new LikePattern(likeString, escape); } - - public static LikePattern toLikePattern(Expression expression) { - if (expression.foldable() == false || DataTypes.isString(expression.dataType()) == false) { - throw new EqlIllegalArgumentException("Invalid like pattern received {}", expression); - } - return toLikePattern(expression.fold().toString()); - } } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessorTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessorTests.java index cb7c01e0047bf..3b1183ce20aa1 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessorTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessorTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.eql.expression.function.scalar.math; import org.elasticsearch.test.ESTestCase; 
+import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.QlException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.l; @@ -19,12 +21,17 @@ private static Object process(Object value, Object base) { return new ToNumber(EMPTY, l(value), l(base)).makePipe().asProcessor().process(null); } + private static String error(Object value, Object base, Class exceptionClass) { + Exception e = expectThrows(exceptionClass, () -> new ToNumber(EMPTY, l(value), l(base)).makePipe().asProcessor().process(null)); + return e.getMessage(); + } + private static String error(Object value, Object base) { - QlIllegalArgumentException saie = expectThrows( - QlIllegalArgumentException.class, - () -> new ToNumber(EMPTY, l(value), l(base)).makePipe().asProcessor().process(null) - ); - return saie.getMessage(); + return error(value, base, QlIllegalArgumentException.class); + } + + private static String clientError(Object value, Object base) { + return error(value, base, InvalidArgumentException.class); } public void toNumberWithLongRange() { @@ -121,7 +128,7 @@ public void toNumberWithUnsupportedDoubleBase() { } public void testNegativeBase16() { - assertEquals("Unable to convert [-0x1] to number of base [16]", error("-0x1", 16)); + assertEquals("Unable to convert [-0x1] to number of base [16]", clientError("-0x1", 16)); } public void testNumberInvalidDataType() { @@ -139,11 +146,11 @@ public void testInvalidBase() { } public void testInvalidSourceString() { - assertEquals("Unable to convert [] to number of base [10]", error("", null)); - assertEquals("Unable to convert [] to number of base [16]", error("", 16)); - assertEquals("Unable to convert [foo] to number of base [10]", error("foo", null)); - assertEquals("Unable to convert [foo] to number of base [16]", error("foo", 16)); - assertEquals("Unable to convert [1.2.3.4] to number of base [10]", 
error("1.2.3.4", 10)); - assertEquals("Unable to convert [1.2.3.4] to number of base [16]", error("1.2.3.4", 16)); + assertEquals("Unable to convert [] to number of base [10]", clientError("", null)); + assertEquals("Unable to convert [] to number of base [16]", clientError("", 16)); + assertEquals("Unable to convert [foo] to number of base [10]", clientError("foo", null)); + assertEquals("Unable to convert [foo] to number of base [16]", clientError("foo", 16)); + assertEquals("Unable to convert [1.2.3.4] to number of base [10]", clientError("1.2.3.4", 10)); + assertEquals("Unable to convert [1.2.3.4] to number of base [16]", clientError("1.2.3.4", 16)); } } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessorTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessorTests.java index 66e544ed55ec3..1e218749a4991 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessorTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessorTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.eql.expression.function.scalar.string; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; @@ -36,8 +36,8 @@ public void testCIDRMatchFunctionInvalidInput() { ArrayList addresses = new ArrayList<>(); // Invalid source address - EqlIllegalArgumentException e = expectThrows( - EqlIllegalArgumentException.class, + Exception e = expectThrows( + InvalidArgumentException.class, () -> new CIDRMatch(EMPTY, l("10.6.48"), 
addresses).makePipe().asProcessor().process(null) ); @@ -46,7 +46,7 @@ public void testCIDRMatchFunctionInvalidInput() { // Invalid match ip address addresses.add(l("10.6.48")); e = expectThrows( - EqlIllegalArgumentException.class, + InvalidArgumentException.class, () -> new CIDRMatch(EMPTY, l("10.6.48.157"), addresses).makePipe().asProcessor().process(null) ); @@ -56,7 +56,7 @@ public void testCIDRMatchFunctionInvalidInput() { // Invalid CIDR addresses.add(l("10.6.12/12")); e = expectThrows( - EqlIllegalArgumentException.class, + InvalidArgumentException.class, () -> new CIDRMatch(EMPTY, l("10.6.48.157"), addresses).makePipe().asProcessor().process(null) ); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 8dca74109b2cc..75b02ff911df7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.BlockLoader; import java.util.List; @@ -208,6 +209,24 @@ interface Builder extends BlockLoader.Builder, Releasable { * Builds the block. This method can be called multiple times. */ Block build(); + + /** + * Build many {@link Block}s at once, releasing any partially built blocks + * if any fail. + */ + static Block[] buildAll(Block.Builder... 
builders) { + Block[] blocks = new Block[builders.length]; + try { + for (int b = 0; b < blocks.length; b++) { + blocks[b] = builders[b].build(); + } + } finally { + if (blocks[blocks.length - 1] == null) { + Releasables.closeExpectNoException(blocks); + } + } + return blocks; + } } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 842f8719f19dc..b6ba42f953609 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -16,7 +16,7 @@ /** * {@link Vector} where each entry references a lucene document. */ -public class DocVector extends AbstractVector implements Vector { +public final class DocVector extends AbstractVector implements Vector { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DocVector.class); @@ -48,7 +48,6 @@ public class DocVector extends AbstractVector implements Vector { final DocBlock block; - @SuppressWarnings("this-escape") public DocVector(IntVector shards, IntVector segments, IntVector docs, Boolean singleSegmentNonDecreasing) { super(shards.getPositionCount(), null); this.shards = shards; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index be3ee5ff40792..bd06296309886 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -24,6 +24,7 @@ import java.util.Iterator; import java.util.List; import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import 
java.util.function.Supplier; import java.util.stream.Collectors; @@ -56,6 +57,10 @@ public class Driver implements Releasable, Describable { private final AtomicReference cancelReason = new AtomicReference<>(); private final AtomicReference> blocked = new AtomicReference<>(); + + private final AtomicBoolean started = new AtomicBoolean(); + private final SubscribableListener completionListener = new SubscribableListener<>(); + /** * Status reported to the tasks API. We write the status at most once every * {@link #statusNanos}, as soon as loop has finished and after {@link #statusNanos} @@ -149,7 +154,7 @@ private SubscribableListener run(TimeValue maxTime, int maxIterations) { if (isFinished()) { status.set(updateStatus(DriverStatus.Status.DONE)); driverContext.finish(); - releasable.close(); + Releasables.close(releasable, driverContext.getSnapshot()); } else { status.set(updateStatus(DriverStatus.Status.WAITING)); } @@ -159,7 +164,7 @@ private SubscribableListener run(TimeValue maxTime, int maxIterations) { /** * Whether the driver has run the chain of operators to completion. 
*/ - public boolean isFinished() { + private boolean isFinished() { return activeOperators.isEmpty(); } @@ -168,6 +173,19 @@ public void close() { drainAndCloseOperators(null); } + /** + * Abort the driver and wait for it to finish + */ + public void abort(Exception reason, ActionListener listener) { + completionListener.addListener(listener); + if (started.compareAndSet(false, true)) { + drainAndCloseOperators(reason); + completionListener.onFailure(reason); + } else { + cancel(reason.getMessage()); + } + } + private SubscribableListener runSingleLoopIteration() { ensureNotCancelled(); boolean movedPage = false; @@ -261,8 +279,11 @@ public static void start( int maxIterations, ActionListener listener ) { - driver.status.set(driver.updateStatus(DriverStatus.Status.STARTING)); - schedule(DEFAULT_TIME_BEFORE_YIELDING, maxIterations, threadContext, executor, driver, listener); + driver.completionListener.addListener(listener); + if (driver.started.compareAndSet(false, true)) { + driver.status.set(driver.updateStatus(DriverStatus.Status.STARTING)); + schedule(DEFAULT_TIME_BEFORE_YIELDING, maxIterations, threadContext, executor, driver, driver.completionListener); + } } // Drains all active operators and closes them. 
@@ -279,7 +300,7 @@ private void drainAndCloseOperators(@Nullable Exception e) { itr.remove(); } driverContext.finish(); - Releasables.closeWhileHandlingException(releasable); + Releasables.closeWhileHandlingException(releasable, driverContext.getSnapshot()); } private static void schedule( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java index b21671cd30517..85860cf8766f1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import java.util.Collections; import java.util.IdentityHashMap; @@ -69,7 +70,12 @@ public BlockFactory blockFactory() { } /** A snapshot of the driver context. */ - public record Snapshot(Set releasables) {} + public record Snapshot(Set releasables) implements Releasable { + @Override + public void close() { + Releasables.close(releasables); + } + } /** * Adds a releasable to this context. Releasables are identified by Object identity. 
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java index 788fc2887ebd9..4f16a615572b7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.Releasables; import org.elasticsearch.tasks.TaskCancelledException; import java.util.HashMap; @@ -84,13 +83,6 @@ private void done() { responseHeaders.setOnce(driverIndex, threadContext.getResponseHeaders()); if (counter.countDown()) { mergeResponseHeaders(responseHeaders); - for (Driver d : drivers) { - if (d.status().status() == DriverStatus.Status.QUEUED) { - d.close(); - } else { - Releasables.close(d.driverContext().getSnapshot().releasables()); - } - } Exception error = failure.get(); if (error != null) { listener.onFailure(error); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java index b486318f85405..38d879f8f7ad4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java @@ -52,7 +52,13 @@ protected void start(Driver driver, ActionListener driverListener) { new DriverRequest(driver, executor), parentTask, TransportRequestOptions.EMPTY, - TransportResponseHandler.empty(executor, driverListener) + TransportResponseHandler.empty( + executor, + // The TransportResponseHandler can be 
notified while the Driver is still running during node shutdown + // or the Driver hasn't started when the parent task is canceled. In such cases, we should abort + // the Driver and wait for it to finish. + ActionListener.wrap(driverListener::onResponse, e -> driver.abort(e, driverListener)) + ) ); } }; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java index 9657d60376763..2ebc9c82c6d98 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -91,12 +91,6 @@ public long ramBytesUsed() { return SHALLOW_SIZE + keys.ramBytesUsed() + orderByCompositeKeyAscending.size() / Byte.SIZE + values.ramBytesUsed(); } - private void clear() { - keys.clear(); - orderByCompositeKeyAscending.clear(); - values.clear(); - } - @Override public void close() { Releasables.closeExpectNoException(keys, values); @@ -405,7 +399,17 @@ private Iterator toPages() { p++; if (p == size) { - result.add(new Page(Arrays.stream(builders).map(ResultBuilder::build).toArray(Block[]::new))); + Block[] blocks = new Block[builders.length]; + try { + for (int b = 0; b < blocks.length; b++) { + blocks[b] = builders[b].build(); + } + } finally { + if (blocks[blocks.length - 1] == null) { + Releasables.closeExpectNoException(blocks); + } + } + result.add(new Page(blocks)); Releasables.closeExpectNoException(builders); builders = null; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java index 92c6114d86897..e41c82b89772e 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Tuple; @@ -44,23 +45,23 @@ public TupleBlockSourceOperator(BlockFactory blockFactory, List item = values.get(positionOffset + i); - if (item.v1() == null) { - blockBuilder1.appendNull(); - } else { - blockBuilder1.appendLong(item.v1()); - } - if (item.v2() == null) { - blockBuilder2.appendNull(); - } else { - blockBuilder2.appendLong(item.v2()); + try (var blockBuilder1 = blockFactory.newLongBlockBuilder(length); var blockBuilder2 = blockFactory.newLongBlockBuilder(length)) { + for (int i = 0; i < length; i++) { + Tuple item = values.get(positionOffset + i); + if (item.v1() == null) { + blockBuilder1.appendNull(); + } else { + blockBuilder1.appendLong(item.v1()); + } + if (item.v2() == null) { + blockBuilder2.appendNull(); + } else { + blockBuilder2.appendLong(item.v2()); + } } + currentPosition += length; + return new Page(Block.Builder.buildAll(blockBuilder1, blockBuilder2)); } - currentPosition += length; - return new Page(blockBuilder1.build(), blockBuilder2.build()); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 6c5bab9b8f784..f43873b4fdfd9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -11,6 +11,7 @@ import 
org.apache.lucene.tests.util.RamUsageTester; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -36,6 +37,7 @@ import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.TupleBlockSourceOperator; import org.elasticsearch.core.Tuple; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xpack.versionfield.Version; @@ -234,15 +236,29 @@ public long accumulateObject(Object o, long shallowSize, Map fiel public void testRandomTopN() { for (boolean asc : List.of(true, false)) { - int limit = randomIntBetween(1, 20); - List inputValues = randomList(0, 5000, ESTestCase::randomLong); - Comparator comparator = asc ? naturalOrder() : reverseOrder(); - List expectedValues = inputValues.stream().sorted(comparator).limit(limit).toList(); - List outputValues = topNLong(inputValues, limit, asc, false); - assertThat(outputValues, equalTo(expectedValues)); + testRandomTopN(asc, driverContext()); } } + public void testRandomTopNCranky() { + try { + testRandomTopN(randomBoolean(), crankyDriverContext()); + logger.info("cranky didn't break us"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + private void testRandomTopN(boolean asc, DriverContext context) { + int limit = randomIntBetween(1, 20); + List inputValues = randomList(0, 5000, ESTestCase::randomLong); + Comparator comparator = asc ? 
naturalOrder() : reverseOrder(); + List expectedValues = inputValues.stream().sorted(comparator).limit(limit).toList(); + List outputValues = topNLong(context, inputValues, limit, asc, false); + assertThat(outputValues, equalTo(expectedValues)); + } + public void testBasicTopN() { List values = Arrays.asList(2L, 1L, 4L, null, 5L, 10L, null, 20L, 4L, 100L); assertThat(topNLong(values, 1, true, false), equalTo(Arrays.asList(1L))); @@ -267,8 +283,15 @@ public void testBasicTopN() { assertThat(topNLong(values, 100, false, true), equalTo(Arrays.asList(null, null, 100L, 20L, 10L, 5L, 4L, 4L, 2L, 1L))); } - private List topNLong(List inputValues, int limit, boolean ascendingOrder, boolean nullsFirst) { + private List topNLong( + DriverContext driverContext, + List inputValues, + int limit, + boolean ascendingOrder, + boolean nullsFirst + ) { return topNTwoColumns( + driverContext, inputValues.stream().map(v -> tuple(v, 0L)).toList(), limit, List.of(LONG, LONG), @@ -277,6 +300,10 @@ private List topNLong(List inputValues, int limit, boolean ascending ).stream().map(Tuple::v1).toList(); } + private List topNLong(List inputValues, int limit, boolean ascendingOrder, boolean nullsFirst) { + return topNLong(driverContext(), inputValues, limit, ascendingOrder, nullsFirst); + } + public void testCompareInts() { testCompare( new Page( @@ -422,6 +449,7 @@ public void testTopNTwoColumns() { List> values = Arrays.asList(tuple(1L, 1L), tuple(1L, 2L), tuple(null, null), tuple(null, 1L), tuple(1L, null)); assertThat( topNTwoColumns( + driverContext(), values, 5, List.of(LONG, LONG), @@ -432,6 +460,7 @@ public void testTopNTwoColumns() { ); assertThat( topNTwoColumns( + driverContext(), values, 5, List.of(LONG, LONG), @@ -442,6 +471,7 @@ public void testTopNTwoColumns() { ); assertThat( topNTwoColumns( + driverContext(), values, 5, List.of(LONG, LONG), @@ -613,13 +643,13 @@ public void testCollectAllValues_RandomMultiValues() { } private List> topNTwoColumns( + DriverContext 
driverContext, List> inputValues, int limit, List elementTypes, List encoder, List sortOrders ) { - DriverContext driverContext = driverContext(); List> outputValues = new ArrayList<>(); try ( Driver driver = new Driver( diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/100_bug_fix.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/100_bug_fix.yml index 720914b579f36..d5f5bee46f50a 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/100_bug_fix.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/100_bug_fix.yml @@ -1,5 +1,7 @@ --- -setup: +"Bug fix https://github.com/elastic/elasticsearch/issues/99472": + - skip: + features: warnings - do: bulk: index: test @@ -9,11 +11,6 @@ setup: - { "emp_no": 10, "ip1": "127.0", "ip2": "0.1" } - { "index": { } } - { "emp_no": 20 } - ---- -"Bug fix https://github.com/elastic/elasticsearch/issues/99472": - - skip: - features: warnings - do: warnings: - "Line 1:37: evaluation of [to_ip(coalesce(ip1.keyword, \"255.255.255.255\"))] failed, treating result as null. Only first 20 failures recorded." 
@@ -55,3 +52,70 @@ setup: - length: { values: 2 } - match: { values.0: [ 10, "127.00.1", "127.00.1", null ] } - match: { values.1: [ 20, null, "255.255.255.255", "255.255.255.255"] } + +--- +"Bug fix https://github.com/elastic/elasticsearch/issues/101489": + - do: + indices.create: + index: index1 + body: + mappings: + properties: + http: + properties: + headers: + type: flattened + - do: + indices.create: + index: index2 + body: + mappings: + properties: + http: + properties: + headers: + properties: + location: + type: keyword + - do: + indices.create: + index: index3 + body: + mappings: + properties: + http: + properties: + headers: + properties: + location: + type: text + - do: + bulk: + refresh: true + body: + - { "index": { "_index": "index1" } } + - { "http.headers": { "location": "RO","code": 123 } } + - { "index": { "_index": "index2" } } + - { "http.headers.location": "US" } + - { "index": { "_index": "index3" } } + - { "http.headers.location": "CN" } + - do: + esql.query: + body: + query: 'from index* [metadata _index] | limit 5 | sort _index desc' + - match: { columns.0.name: http.headers } + - match: { columns.0.type: unsupported } + - match: { columns.1.name: http.headers.location } + - match: { columns.1.type: unsupported } + - match: { columns.2.name: _index } + - match: { columns.2.type: keyword } + - length: { values: 3 } + - match: { values.0.0: null } + - match: { values.0.1: null } + - match: { values.0.2: index3 } + - match: { values.1.0: null } + - match: { values.1.1: null } + - match: { values.1.2: index2 } + - match: { values.2.0: null } + - match: { values.2.1: null } + - match: { values.2.2: index1 } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 72fa96710710b..f85dbeda7f6bc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ 
-185,7 +185,7 @@ string:keyword |datetime:date convertFromUnsignedLong row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warning:Line 1:58: evaluation of [to_datetime(ul)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:58: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [9223372036854775808] out of [long] range +warning:Line 1:58: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range ul:ul | dt:date [9223372036854775808, 520128000000]|1986-06-26T00:00:00.000Z diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 68ed0319047fd..cdc25587793cc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -66,7 +66,7 @@ long:long |ul:ul convertDoubleToUL row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warning:Line 1:48: evaluation of [to_ul(1e20)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:48: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [1.0E20] out of [unsigned_long] range +warning:Line 1:48: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E20] out of [unsigned_long] range d:double |ul:ul |overflow:ul 123.4 |123 |null @@ -123,7 +123,7 @@ int:integer |long:long convertULToLong row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warning:Line 1:67: evaluation of [to_long(ul)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:67: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [9223372036854775808] out of [long] range +warning:Line 1:67: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range ul:ul | long:long [9223372036854775807, 9223372036854775808]|9223372036854775807 @@ -164,7 +164,7 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long convertDoubleToLong row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warning:Line 1:51: evaluation of [to_long(1e19)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:51: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [1.0E19] out of [long] range +warning:Line 1:51: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [long] range d:double |d2l:long |overflow:long 123.4 |123 |null @@ -186,7 +186,7 @@ ROW long = [5013792, 2147483647, 501379200000] // end::to_int-long[] ; warning:Line 2:14: evaluation of [TO_INTEGER(long)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 2:14: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [501379200000] out of [integer] range +warning:Line 2:14: org.elasticsearch.xpack.ql.InvalidArgumentException: [501379200000] out of [integer] range // tag::to_int-long-result[] long:long |int:integer @@ -198,7 +198,7 @@ convertULToInt row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warning:Line 1:57: evaluation of [to_int(ul)] failed, treating result as null. Only first 20 failures recorded. // UL conversion to int dips into long; not the most efficient, but it's how SQL does it too. 
-warning:Line 1:57: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [9223372036854775808] out of [long] range +warning:Line 1:57: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range ul:ul |int:integer [2147483647, 9223372036854775808]|2147483647 @@ -232,7 +232,7 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer |overflow:in convertDoubleToInt row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warning:Line 1:54: evaluation of [to_integer(1e19)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:54: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [1.0E19] out of [long] range +warning:Line 1:54: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [long] range d:double |d2i:integer |overflow:integer 123.4 |123 |null @@ -476,7 +476,7 @@ ROW deg = [90, 180, 270] warningWithFromSource from employees | sort emp_no | limit 1 | eval x = to_long(emp_no) * 10000000 | eval y = to_int(x) > 1 | keep y; warning:Line 1:89: evaluation of [to_int(x)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:89: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100010000000] out of [integer] range +warning:Line 1:89: org.elasticsearch.xpack.ql.InvalidArgumentException: [100010000000] out of [integer] range y:boolean null @@ -486,26 +486,26 @@ null multipleWarnings-Ignore from employees | sort emp_no | eval x = to_long(emp_no) * 10000000 | where to_int(x) > 1 | keep x | limit 1; warning:Line 1:76: evaluation of [to_int(x)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100010000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100020000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100030000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100040000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100050000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100060000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100070000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100080000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100090000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100100000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100110000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100120000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100130000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100140000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100150000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100160000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100170000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: 
[100180000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100190000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100200000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100010000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100020000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100030000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100040000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100050000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100060000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100070000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100080000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100090000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100100000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100110000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100120000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100130000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100140000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100150000000] out of [integer] range +warning:Line 1:76: 
org.elasticsearch.xpack.ql.InvalidArgumentException: [100160000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100170000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100180000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100190000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100200000000] out of [integer] range x:long ; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 11c8b14fb76b9..5134e05b4cc3d 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -48,7 +48,10 @@ public void ensureBlocksReleased() { CircuitBreakerService breakerService = internalCluster().getInstance(CircuitBreakerService.class, node); CircuitBreaker reqBreaker = breakerService.getBreaker(CircuitBreaker.REQUEST); try { - assertBusy(() -> assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L))); + assertBusy(() -> { + logger.info("running tasks: {}", client().admin().cluster().prepareListTasks().get()); + assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L)); + }); } catch (Exception e) { assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L)); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index 8b79d5df189c9..342df5209ec95 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.action; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -20,6 +19,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.ArrayList; import java.util.Collection; @@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101588") +@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") public class EsqlActionBreakerIT extends EsqlActionIT { public static class InternalTransportSettingPlugin extends Plugin { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlClientException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlClientException.java index 48f03e2df911e..ba539777b36c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlClientException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlClientException.java @@ -8,9 +8,9 @@ import org.elasticsearch.xpack.ql.QlClientException; -public abstract class EsqlClientException extends QlClientException { +public class EsqlClientException extends QlClientException { - protected 
EsqlClientException(String message, Object... args) { + public EsqlClientException(String message, Object... args) { super(message, args); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java index 4a86dd1741daa..4372401e7d8f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.analysis; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.ql.common.Failure; @@ -22,8 +21,4 @@ protected VerificationException(Collection sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 2d4a026afaf6e..2199d4bddaf77 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -92,9 +92,8 @@ import java.util.Locale; import java.util.stream.Collectors; -public class EsqlFunctionRegistry extends FunctionRegistry { +public final class EsqlFunctionRegistry extends FunctionRegistry { - @SuppressWarnings("this-escape") public EsqlFunctionRegistry() { register(functions()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index 6dcba915186a4..e5ac3e395f6aa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -25,7 +25,7 @@ * the engine). * As such the field is marked as unresolved (so the verifier can pick up its usage outside project). */ -public class UnsupportedAttribute extends FieldAttribute implements Unresolvable { +public final class UnsupportedAttribute extends FieldAttribute implements Unresolvable { private final String message; private final boolean hasCustomMessage; @@ -42,7 +42,6 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field this(source, name, field, customMessage, null); } - @SuppressWarnings("this-escape") public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage, NameId id) { super(source, null, name, field, null, Nullability.TRUE, id, false); this.hasCustomMessage = customMessage != null; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 2117828be6533..a3d08e4cb6306 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -37,14 +37,13 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.ql.type.DataTypes.NULL; -public class Case extends ScalarFunction implements EvaluatorMapper { +public final class Case extends ScalarFunction implements EvaluatorMapper { 
record Condition(Expression condition, Expression value) {} private final List conditions; private final Expression elseValue; private DataType dataType; - @SuppressWarnings("this-escape") public Case(Source source, Expression first, List rest) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); int conditionCount = children().size() / 2; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 1e84bf60b0dde..87d6460d70984 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -12,7 +12,6 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; @@ -98,12 +97,7 @@ private static int checkedTotalLength(BytesRef[] values) { length += v.length; } if (length > MAX_CONCAT_LENGTH) { - throw new EsqlClientException("concatenating more than [" + MAX_CONCAT_LENGTH + "] bytes is not supported") { - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; // return a 400 response - } - }; + throw new EsqlClientException("concatenating more than [" + MAX_CONCAT_LENGTH + "] bytes is not supported"); } return length; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 
5d3108a785f59..f24324fac2fbd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expression; @@ -116,7 +117,7 @@ public Literal visitIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { try { number = StringUtils.parseIntegral(text); - } catch (QlIllegalArgumentException siae) { + } catch (InvalidArgumentException siae) { // if it's too large, then quietly try to parse as a float instead try { return new Literal(source, StringUtils.parseDouble(text), DataTypes.DOUBLE); @@ -225,7 +226,7 @@ public Object visitQualifiedIntegerLiteral(EsqlBaseParser.QualifiedIntegerLitera try { TemporalAmount quantity = parseTemporalAmout(value, qualifier, source); return new Literal(source, quantity, quantity instanceof Duration ? 
TIME_DURATION : DATE_PERIOD); - } catch (QlIllegalArgumentException | ArithmeticException e) { + } catch (InvalidArgumentException | ArithmeticException e) { // the range varies by unit: Duration#ofMinutes(), #ofHours() will Math#multiplyExact() to reduce the unit to seconds; // and same for Period#ofWeeks() throw new ParsingException(source, "Number [{}] outside of [{}] range", ctx.integerValue().getText(), qualifier); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java index 1cb71d64d5548..6779e25b88511 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.parser; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.ql.tree.Source; @@ -50,11 +49,6 @@ public String getErrorMessage() { return super.getMessage(); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - @Override public String getMessage() { return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 963b92c048382..9a76bc0865865 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -46,6 +46,8 @@ import org.elasticsearch.compute.operator.topn.TopNOperator.TopNOperatorFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; 
+import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; @@ -111,6 +113,7 @@ * drivers that are used to execute the given plan. */ public class LocalExecutionPlanner { + private static final Logger logger = LogManager.getLogger(LocalExecutionPlanner.class); private final String sessionId; private final CancellableTask parentTask; @@ -813,6 +816,7 @@ public List createDrivers(String sessionId) { try { for (DriverFactory df : driverFactories) { for (int i = 0; i < df.driverParallelism.instanceCount; i++) { + logger.trace("building {} {}", i, df); drivers.add(df.driverSupplier.apply(sessionId)); } } @@ -820,7 +824,7 @@ public List createDrivers(String sessionId) { return drivers; } finally { if (success == false) { - Releasables.close(() -> Releasables.close(drivers)); + Releasables.close(Releasables.wrap(drivers)); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalVerificationException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalVerificationException.java index f303fc5a7e047..3bfc8385bbb8a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalVerificationException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalVerificationException.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.planner; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.ql.common.Failure; @@ -19,8 +18,4 @@ public PhysicalVerificationException(Collection sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git 
a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java index d4964f14b3fca..4961efd7253ec 100644 --- a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java +++ b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java @@ -25,8 +25,7 @@ import static org.hamcrest.Matchers.instanceOf; -public class RestGraphActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestGraphActionTests extends RestActionTestCase { private final List compatibleMediaType = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java index c4c91dac9a513..7a279e367a0e8 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.idp; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -68,7 +66,6 @@ public class IdentityProviderPlugin extends Plugin implements ActionPlugin { private static final Setting ENABLED_SETTING = Setting.boolSetting("xpack.idp.enabled", false, Setting.Property.NodeScope); - private final Logger logger = LogManager.getLogger(IdentityProviderPlugin.class); private boolean enabled; private Settings settings; diff --git 
a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java index f3634f96ed7f0..d86268db44f64 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java @@ -28,7 +28,7 @@ public class TransportDeleteSamlServiceProviderAction extends HandledTransportAc DeleteSamlServiceProviderRequest, DeleteSamlServiceProviderResponse> { - private final Logger logger = LogManager.getLogger(TransportDeleteSamlServiceProviderAction.class); + private static final Logger logger = LogManager.getLogger(TransportDeleteSamlServiceProviderAction.class); private final SamlServiceProviderIndex index; @Inject diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportPutSamlServiceProviderAction.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportPutSamlServiceProviderAction.java index 31bcf6f428fe2..64193d7e1778a 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportPutSamlServiceProviderAction.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportPutSamlServiceProviderAction.java @@ -35,7 +35,7 @@ public class TransportPutSamlServiceProviderAction extends HandledTransportActio PutSamlServiceProviderRequest, PutSamlServiceProviderResponse> { - private final Logger logger = LogManager.getLogger(TransportPutSamlServiceProviderAction.class); + private static final Logger logger = LogManager.getLogger(TransportPutSamlServiceProviderAction.class); private final SamlServiceProviderIndex index; private final 
SamlIdentityProvider identityProvider; private final Clock clock; diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportSamlInitiateSingleSignOnAction.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportSamlInitiateSingleSignOnAction.java index 9e2826ed572be..a41569920ecf8 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportSamlInitiateSingleSignOnAction.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportSamlInitiateSingleSignOnAction.java @@ -38,7 +38,7 @@ public class TransportSamlInitiateSingleSignOnAction extends HandledTransportAct SamlInitiateSingleSignOnRequest, SamlInitiateSingleSignOnResponse> { - private final Logger logger = LogManager.getLogger(TransportSamlInitiateSingleSignOnAction.class); + private static final Logger logger = LogManager.getLogger(TransportSamlInitiateSingleSignOnAction.class); private final SecurityContext securityContext; private final SamlIdentityProvider identityProvider; diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java index 9d5e7cae6ad31..09635557d0e0f 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java @@ -63,7 +63,7 @@ public class ApplicationActionsResolver extends AbstractLifecycleComponent { Setting.Property.NodeScope ); - private final Logger logger = LogManager.getLogger(ApplicationActionsResolver.class); + private static final Logger logger = LogManager.getLogger(ApplicationActionsResolver.class); private final ServiceProviderDefaults 
defaults; private final Client client; diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java index 4ee2b91c5f2f7..eeb5e91f29ced 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java @@ -62,7 +62,7 @@ public static UserPrivileges noAccess(String principal) { } } - private final Logger logger = LogManager.getLogger(UserPrivilegeResolver.class); + private static final Logger logger = LogManager.getLogger(UserPrivilegeResolver.class); private final Client client; private final SecurityContext securityContext; private final ApplicationActionsResolver actionsResolver; diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java index 5c83da57f84a8..0313040b7e8ae 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java @@ -60,7 +60,7 @@ public class SamlAuthnRequestValidator { private final SamlFactory samlFactory; private final SamlIdentityProvider idp; - private final Logger logger = LogManager.getLogger(SamlAuthnRequestValidator.class); + private static final Logger logger = LogManager.getLogger(SamlAuthnRequestValidator.class); private static final String[] XSD_FILES = new String[] { "/org/elasticsearch/xpack/idp/saml/support/saml-schema-protocol-2.0.xsd", 
"/org/elasticsearch/xpack/idp/saml/support/saml-schema-assertion-2.0.xsd", diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SuccessfulAuthenticationResponseMessageBuilder.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SuccessfulAuthenticationResponseMessageBuilder.java index c59d99c7171c4..5d8cbf2607338 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SuccessfulAuthenticationResponseMessageBuilder.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SuccessfulAuthenticationResponseMessageBuilder.java @@ -53,7 +53,7 @@ */ public class SuccessfulAuthenticationResponseMessageBuilder { - private final Logger logger = LogManager.getLogger(SuccessfulAuthenticationResponseMessageBuilder.class); + private static final Logger logger = LogManager.getLogger(SuccessfulAuthenticationResponseMessageBuilder.class); private final Clock clock; private final SamlIdentityProvider idp; diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlIdentityProvider.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlIdentityProvider.java index e50c83a9af063..76bbcf6d2e0b5 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlIdentityProvider.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlIdentityProvider.java @@ -33,7 +33,7 @@ */ public class SamlIdentityProvider { - private final Logger logger = LogManager.getLogger(SamlIdentityProvider.class); + private static final Logger logger = LogManager.getLogger(SamlIdentityProvider.class); private final String entityId; private final Map ssoEndpoints; diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java 
b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java index fa30825651a61..dc4a9edbf22f4 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java @@ -34,7 +34,7 @@ public class SamlMetadataGenerator { private final SamlFactory samlFactory; private final SamlIdentityProvider idp; - private final Logger logger = LogManager.getLogger(SamlMetadataGenerator.class); + private static final Logger logger = LogManager.getLogger(SamlMetadataGenerator.class); public SamlMetadataGenerator(SamlFactory samlFactory, SamlIdentityProvider idp) { this.samlFactory = samlFactory; diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java index 558ac9a20dc0c..2291061af3e98 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java @@ -65,7 +65,7 @@ */ public class SamlServiceProviderIndex implements Closeable { - private final Logger logger = LogManager.getLogger(SamlServiceProviderIndex.class); + private static final Logger logger = LogManager.getLogger(SamlServiceProviderIndex.class); private final Client client; private final ClusterService clusterService; diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java index 35078a42adf19..bb75584a9cf75 100644 --- 
a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java @@ -751,7 +751,8 @@ static List migrateComponentTemplates(Metadata.Builder mb, ClusterState ComponentTemplate migratedComponentTemplate = new ComponentTemplate( migratedInnerTemplate, componentTemplate.version(), - componentTemplate.metadata() + componentTemplate.metadata(), + componentTemplate.deprecated() ); mb.put(componentEntry.getKey(), migratedComponentTemplate); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java index 102436b37524c..f9e6eef5ffcc7 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java @@ -65,6 +65,11 @@ public TestInferenceService(InferenceServiceFactoryContext context) { public String name() { return NAME; } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } } public static class TestInferenceServiceClusterService extends TestInferenceServiceBase { @@ -83,6 +88,11 @@ public boolean isInClusterService() { public String name() { return NAME; } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } } public abstract static class TestInferenceServiceBase implements InferenceService { diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 3ef93c6c275d8..42ad64b9c60a3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -8,10 +8,14 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.inference.EmptyTaskSettings; +import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; import java.util.ArrayList; import java.util.List; @@ -23,6 +27,9 @@ private InferenceNamedWriteablesProvider() {} public static List getNamedWriteables() { List namedWriteables = new ArrayList<>(); + // Empty default settings + namedWriteables.add(new NamedWriteableRegistry.Entry(EmptyTaskSettings.class, EmptyTaskSettings.NAME, EmptyTaskSettings::new)); + // ELSER config namedWriteables.add( new NamedWriteableRegistry.Entry(ServiceSettings.class, ElserMlNodeServiceSettings.NAME, ElserMlNodeServiceSettings::new) @@ -31,6 +38,18 @@ public static List getNamedWriteables() { new NamedWriteableRegistry.Entry(TaskSettings.class, ElserMlNodeTaskSettings.NAME, ElserMlNodeTaskSettings::new) ); + // Hugging Face ELSER config + namedWriteables.add( + new NamedWriteableRegistry.Entry( + 
ServiceSettings.class, + HuggingFaceElserServiceSettings.NAME, + HuggingFaceElserServiceSettings::new + ) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry(SecretSettings.class, HuggingFaceElserSecretSettings.NAME, HuggingFaceElserSecretSettings::new) + ); + return namedWriteables; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 2f0f95cf8a911..393cbd0413e5f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -41,12 +41,14 @@ import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpSettings; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestGetInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestInferenceAction; import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserService; import java.util.Collection; import java.util.List; @@ -62,6 +64,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, InferenceSe private final SetOnce httpRequestSenderFactory = new SetOnce<>(); // We'll keep a reference to the http manager just in case the inference services don't get closed individually private final SetOnce httpManager = new SetOnce<>(); + private final SetOnce 
throttlerManager = new SetOnce<>(); public InferencePlugin(Settings settings) { this.settings = settings; @@ -97,7 +100,9 @@ public List getRestHandlers( @Override public Collection createComponents(PluginServices services) { - httpManager.set(HttpClientManager.create(settings, services.threadPool(), services.clusterService())); + throttlerManager.set(new ThrottlerManager(settings, services.threadPool(), services.clusterService())); + + httpManager.set(HttpClientManager.create(settings, services.threadPool(), services.clusterService(), throttlerManager.get())); httpRequestSenderFactory.set( new HttpRequestSenderFactory(services.threadPool(), httpManager.get(), services.clusterService(), settings) ); @@ -138,7 +143,7 @@ public List> getExecutorBuilders(Settings settingsToUse) { new ScalingExecutorBuilder( UTILITY_THREAD_POOL_NAME, 0, - 1, + 10, TimeValue.timeValueMinutes(10), false, "xpack.inference.utility_thread_pool" @@ -151,7 +156,8 @@ public List> getSettings() { return Stream.of( HttpSettings.getSettings(), HttpClientManager.getSettings(), - HttpRequestSenderFactory.HttpRequestSender.getSettings() + HttpRequestSenderFactory.HttpRequestSender.getSettings(), + ThrottlerManager.getSettings() ).flatMap(Collection::stream).collect(Collectors.toList()); } @@ -167,7 +173,7 @@ public String getFeatureDescription() { @Override public List getInferenceServiceFactories() { - return List.of(ElserMlNodeService::new); + return List.of(ElserMlNodeService::new, context -> new HuggingFaceElserService(httpRequestSenderFactory, throttlerManager)); } @Override @@ -177,8 +183,6 @@ public List getInferenceServiceNamedWriteables() { @Override public void close() { - if (httpManager.get() != null) { - IOUtils.closeWhileHandlingException(httpManager.get()); - } + IOUtils.closeWhileHandlingException(httpManager.get(), throttlerManager.get()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 046eff3e6b830..569d4e023928b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -41,10 +43,14 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.core.Strings.format; + public class TransportPutInferenceModelAction extends TransportMasterNodeAction< PutInferenceModelAction.Request, PutInferenceModelAction.Response> { + private static final Logger logger = LogManager.getLogger(TransportPutInferenceModelAction.class); + private final ModelRegistry modelRegistry; private final InferenceServiceRegistry serviceRegistry; private final Client client; @@ -97,6 +103,30 @@ protected void masterOperation( return; } + // Check if all the nodes in this cluster know about the service + if (service.get().getMinimalSupportedVersion().after(state.getMinTransportVersion())) { + logger.warn( + format( + "Service [%s] requires version [%s] but minimum cluster version is [%s]", + serviceName, + service.get().getMinimalSupportedVersion(), + state.getMinTransportVersion() + ) + ); + + listener.onFailure( + new ElasticsearchStatusException( + format( + "All nodes in the cluster are not aware of the service [%s]." 
+ + "Wait for the cluster to finish upgrading and try again.", + serviceName + ), + RestStatus.BAD_REQUEST + ) + ); + return; + } + if (service.get().isInClusterService()) { // Find the cluster platform as the service may need that // information when creating the model diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java new file mode 100644 index 0000000000000..bc52a04ab7209 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceResults; + +/** + * Defines an inference request to a 3rd party service. The success or failure response is communicated through the provided listener. + */ +public interface ExecutableAction { + void execute(String input, ActionListener listener); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java new file mode 100644 index 0000000000000..acc3ab57ce9eb --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceClient; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestEntity; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; + +public class HuggingFaceElserAction implements ExecutableAction { + + private final HuggingFaceAccount account; + private final HuggingFaceClient client; + + public HuggingFaceElserAction(Sender sender, HuggingFaceElserModel model, ThrottlerManager throttlerManager) { + this.client = new HuggingFaceClient(sender, throttlerManager); + this.account = new HuggingFaceAccount(model.getServiceSettings().uri(), model.getSecretSettings().apiKey()); + } + + public void execute(String input, ActionListener listener) { + try { + HuggingFaceElserRequest request = new HuggingFaceElserRequest(account, new HuggingFaceElserRequestEntity(input)); + + client.send(request, listener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure( + new ElasticsearchStatusException("Failed to send request ELSER Hugging Face 
request", RestStatus.INTERNAL_SERVER_ERROR, e) + ); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java index 125ff7ae047ac..1dac8153da4f1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java @@ -19,9 +19,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.common.socket.SocketAccess; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; import java.io.IOException; +import java.util.Objects; import java.util.concurrent.CancellationException; import java.util.concurrent.atomic.AtomicReference; @@ -41,11 +43,17 @@ enum Status { private final AtomicReference status = new AtomicReference<>(Status.CREATED); private final ThreadPool threadPool; private final HttpSettings settings; + private final ThrottlerManager throttlerManager; - public static HttpClient create(HttpSettings settings, ThreadPool threadPool, PoolingNHttpClientConnectionManager connectionManager) { - CloseableHttpAsyncClient client = createAsyncClient(connectionManager); + public static HttpClient create( + HttpSettings settings, + ThreadPool threadPool, + PoolingNHttpClientConnectionManager connectionManager, + ThrottlerManager throttlerManager + ) { + CloseableHttpAsyncClient client = createAsyncClient(Objects.requireNonNull(connectionManager)); - return new HttpClient(settings, client, threadPool); + return new HttpClient(settings, client, threadPool, throttlerManager); } private static CloseableHttpAsyncClient createAsyncClient(PoolingNHttpClientConnectionManager connectionManager) { @@ -59,10 +67,11 @@ private static 
CloseableHttpAsyncClient createAsyncClient(PoolingNHttpClientConn } // Default for testing - HttpClient(HttpSettings settings, CloseableHttpAsyncClient asyncClient, ThreadPool threadPool) { - this.settings = settings; - this.threadPool = threadPool; - this.client = asyncClient; + HttpClient(HttpSettings settings, CloseableHttpAsyncClient asyncClient, ThreadPool threadPool, ThrottlerManager throttlerManager) { + this.settings = Objects.requireNonNull(settings); + this.threadPool = Objects.requireNonNull(threadPool); + this.client = Objects.requireNonNull(asyncClient); + this.throttlerManager = Objects.requireNonNull(throttlerManager); } public void start() { @@ -83,7 +92,7 @@ public void completed(HttpResponse response) { @Override public void failed(Exception ex) { - logger.warn(format("Request [%s] failed", request.getRequestLine()), ex); + throttlerManager.getThrottler().warn(logger, format("Request [%s] failed", request.getRequestLine()), ex); failUsingUtilityThread(ex, listener); } @@ -99,7 +108,7 @@ private void respondUsingUtilityThread(HttpResponse response, HttpUriRequest req try { listener.onResponse(HttpResult.create(settings.getMaxResponseSize(), response)); } catch (Exception e) { - logger.warn(format("Failed to create http result for [%s]", request.getRequestLine()), e); + throttlerManager.getThrottler().warn(logger, format("Failed to create http result for [%s]", request.getRequestLine()), e); listener.onFailure(e); } }); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java index 862170a229b41..494e0f7c60dff 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java @@ -9,6 +9,7 @@ import 
org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; +import org.apache.http.impl.nio.reactor.IOReactorConfig; import org.apache.http.nio.reactor.ConnectingIOReactor; import org.apache.http.nio.reactor.IOReactorException; import org.apache.logging.log4j.LogManager; @@ -19,6 +20,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; import java.io.IOException; @@ -43,7 +45,7 @@ public class HttpClientManager implements Closeable { Setting.Property.Dynamic ); - private static final TimeValue DEFAULT_CONNECTION_EVICTION_THREAD_INTERVAL_TIME = TimeValue.timeValueSeconds(10); + private static final TimeValue DEFAULT_CONNECTION_EVICTION_THREAD_INTERVAL_TIME = TimeValue.timeValueMinutes(1); public static final Setting CONNECTION_EVICTION_THREAD_INTERVAL_SETTING = Setting.timeSetting( "xpack.inference.http.connection_eviction_interval", DEFAULT_CONNECTION_EVICTION_THREAD_INTERVAL_TIME, @@ -65,9 +67,14 @@ public class HttpClientManager implements Closeable { private IdleConnectionEvictor connectionEvictor; private final HttpClient httpClient; - public static HttpClientManager create(Settings settings, ThreadPool threadPool, ClusterService clusterService) { + public static HttpClientManager create( + Settings settings, + ThreadPool threadPool, + ClusterService clusterService, + ThrottlerManager throttlerManager + ) { PoolingNHttpClientConnectionManager connectionManager = createConnectionManager(); - return new HttpClientManager(settings, connectionManager, threadPool, clusterService); + return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); } // Default for testing @@ -75,14 +82,15 @@ public static HttpClientManager create(Settings settings, ThreadPool threadPool, 
Settings settings, PoolingNHttpClientConnectionManager connectionManager, ThreadPool threadPool, - ClusterService clusterService + ClusterService clusterService, + ThrottlerManager throttlerManager ) { this.threadPool = threadPool; this.connectionManager = connectionManager; setMaxConnections(MAX_CONNECTIONS.get(settings)); - this.httpClient = HttpClient.create(new HttpSettings(settings, clusterService), threadPool, connectionManager); + this.httpClient = HttpClient.create(new HttpSettings(settings, clusterService), threadPool, connectionManager, throttlerManager); evictorSettings = new EvictorSettings(settings); connectionEvictor = createConnectionEvictor(); @@ -93,7 +101,8 @@ public static HttpClientManager create(Settings settings, ThreadPool threadPool, private static PoolingNHttpClientConnectionManager createConnectionManager() { ConnectingIOReactor ioReactor; try { - ioReactor = new DefaultConnectingIOReactor(); + var configBuilder = IOReactorConfig.custom().setSoKeepAlive(true); + ioReactor = new DefaultConnectingIOReactor(configBuilder.build()); } catch (IOReactorException e) { var message = "Failed to initialize the inference http client manager"; logger.error(message, e); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java index 0635b4d4d8b3b..328afb264c4ab 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java @@ -21,13 +21,18 @@ import org.elasticsearch.xpack.inference.external.http.HttpResult; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Objects; -import 
java.util.concurrent.AbstractExecutorService; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.core.Strings.format; @@ -44,7 +49,7 @@ * attempting to execute a task (aka waiting for the connection manager to lease a connection). See * {@link org.apache.http.client.config.RequestConfig.Builder#setConnectionRequestTimeout} for more info. */ -class HttpRequestExecutorService extends AbstractExecutorService { +class HttpRequestExecutorService implements ExecutorService { private static final Logger logger = LogManager.getLogger(HttpRequestExecutorService.class); private final String serviceName; @@ -232,4 +237,61 @@ public void send(HttpRequestBase request, @Nullable TimeValue timeout, ActionLis public void execute(Runnable runnable) { throw new UnsupportedOperationException("use send instead"); } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public Future submit(Callable task) { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public Future submit(Runnable task, T result) { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public Future submit(Runnable task) { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. 
+ */ + @Override + public List> invokeAll(Collection> tasks) throws InterruptedException { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public List> invokeAll(Collection> tasks, long timeout, TimeUnit unit) throws InterruptedException { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public T invokeAny(Collection> tasks) throws InterruptedException, ExecutionException { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public T invokeAny(Collection> tasks, long timeout, TimeUnit unit) throws InterruptedException, + ExecutionException, TimeoutException { + throw new UnsupportedOperationException("use send instead"); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java index ce99e19512488..40adc9c4a8bea 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpResult; -import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.Objects; @@ -59,7 +58,7 @@ public HttpRequestSender createSender(String serviceName) { * A class for providing a more friendly interface for sending an {@link HttpUriRequest}. This leverages the queuing logic for sending * a request. 
*/ - public static final class HttpRequestSender implements Closeable { + public static final class HttpRequestSender implements Sender { private static final Logger logger = LogManager.getLogger(HttpRequestSender.class); /** diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java new file mode 100644 index 0000000000000..abef521c77fc6 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.methods.HttpRequestBase; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.Closeable; + +public interface Sender extends Closeable { + void start(); + + void send(HttpRequestBase request, ActionListener listener); + + void send(HttpRequestBase request, @Nullable TimeValue timeout, ActionListener listener); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java new file mode 100644 index 0000000000000..771c7b6adaead --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.elasticsearch.common.settings.SecureString; + +import java.net.URI; +import java.util.Objects; + +public record HuggingFaceAccount(URI url, SecureString apiKey) { + + public HuggingFaceAccount { + Objects.requireNonNull(url); + Objects.requireNonNull(apiKey); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java new file mode 100644 index 0000000000000..ed6e5c200b367 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest; +import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceElserResponseEntity; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; + +import static org.elasticsearch.core.Strings.format; + +public class HuggingFaceClient { + private static final Logger logger = LogManager.getLogger(HuggingFaceClient.class); + + private final ThrottlerManager throttlerManager; + + private final Sender sender; + + public HuggingFaceClient(Sender sender, ThrottlerManager throttlerManager) { + this.sender = sender; + this.throttlerManager = throttlerManager; + } + + public void send(HuggingFaceElserRequest request, ActionListener listener) throws IOException { + HttpRequestBase httpRequest = request.createRequest(); + ActionListener responseListener = ActionListener.wrap(response -> { + try { + listener.onResponse(HuggingFaceElserResponseEntity.fromResponse(response)); + } catch (Exception e) { + String msg = format("Failed to parse the Hugging Face ELSER response for request [%s]", httpRequest.getRequestLine()); + throttlerManager.getThrottler().warn(logger, msg, e); + listener.onFailure(new ElasticsearchException(msg, e)); + } + }, listener::onFailure); + + sender.send(httpRequest, responseListener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java new file mode 100644 index 0000000000000..91ebfe0e3478e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request; + +import org.apache.http.client.methods.HttpRequestBase; + +public interface Request { + HttpRequestBase createRequest(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java new file mode 100644 index 0000000000000..f896bba4ae063 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.huggingface; + +import org.apache.http.Header; +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.message.BasicHeader; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.nio.charset.StandardCharsets; +import java.util.Objects; + +public class HuggingFaceElserRequest implements Request { + + private final HuggingFaceAccount account; + private final HuggingFaceElserRequestEntity entity; + + public HuggingFaceElserRequest(HuggingFaceAccount account, HuggingFaceElserRequestEntity entity) { + this.account = Objects.requireNonNull(account); + this.entity = Objects.requireNonNull(entity); + } + + public HttpRequestBase createRequest() { + HttpPost httpPost = new HttpPost(account.url()); + + ByteArrayEntity byteEntity = new ByteArrayEntity(Strings.toString(entity).getBytes(StandardCharsets.UTF_8)); + httpPost.setEntity(byteEntity); + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaTypeWithoutParameters()); + httpPost.setHeader(apiKeyHeader()); + + return httpPost; + } + + private Header apiKeyHeader() { + return new BasicHeader(HttpHeaders.AUTHORIZATION, "Bearer " + account.apiKey().toString()); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java new file mode 100644 index 0000000000000..f21bee923ecab --- /dev/null +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.huggingface; + +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public record HuggingFaceElserRequestEntity(String inputs) implements ToXContentObject { + + private static final String INPUTS_FIELD = "inputs"; + + public HuggingFaceElserRequestEntity { + Objects.requireNonNull(inputs); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(INPUTS_FIELD, inputs); + + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java new file mode 100644 index 0000000000000..2ac9eb44ed7fb --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; + +public class HuggingFaceElserResponseEntity { + + /** + * The response from hugging face will be formatted as [{"token": 0.0...123}]. Each object within the array will correspond to the + * item within the inputs array within the request sent to hugging face. For example for a request like: + * + *
+     *     
+     *        {
+     *            "inputs": ["hello this is my name", "I wish I was there!"]
+     *        }
+     *     
+     * 
+ * + * The response would look like: + * + *
+     *   
+     *     [
+     *       {
+     *         "the": 0.7226026,
+     *         "to": 0.29198948,
+     *         "is": 0.059944477,
+     *         ...
+     *       },
+     *       {
+     *           "wish": 0.123456,
+     *           ...
+     *       }
+     *     ]
+     *   
+     * 
+ */ + public static TextExpansionResults fromResponse(HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + if (jsonParser.currentToken() == null) { + jsonParser.nextToken(); + } + + List parsedResponse = XContentParserUtils.parseList( + jsonParser, + HuggingFaceElserResponseEntity::parseExpansionResult + ); + + if (parsedResponse.isEmpty()) { + return new TextExpansionResults(DEFAULT_RESULTS_FIELD, Collections.emptyList(), false); + } + + // we only handle a single response right now so just grab the first one + return parsedResponse.get(0); + } + } + + private static TextExpansionResults parseExpansionResult(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + + List weightedTokens = new ArrayList<>(); + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + var floatToken = parser.nextToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, floatToken, parser); + + weightedTokens.add(new TextExpansionResults.WeightedToken(parser.currentName(), parser.floatValue())); + } + // TODO how do we know if the tokens were truncated so we can set this appropriately? 
+ // This will depend on whether we handle the tokenization or hugging face + return new TextExpansionResults(DEFAULT_RESULTS_FIELD, weightedTokens, false); + } + + private HuggingFaceElserResponseEntity() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java new file mode 100644 index 0000000000000..b1dee15a93bd7 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.logging; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.Closeable; +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; + +/** + * A class that throttles calls to a logger. If a log call is made during the throttle period a counter is incremented. + * If a log call occurs after the throttle period, then the call will proceed, and it will include a message like + * "repeated X times" to indicate how often the message was attempting to be logged. 
+ */ +public class Throttler implements Closeable { + + private static final Logger classLogger = LogManager.getLogger(Throttler.class); + + private final TimeValue resetInterval; + private Duration durationToWait; + private final Clock clock; + private final ConcurrentMap logExecutors; + private final AtomicReference cancellableTask = new AtomicReference<>(); + private final AtomicBoolean isRunning = new AtomicBoolean(true); + + /** + * Constructs the throttler and kicks of a scheduled tasks to clear the internal stats. + * + * @param resetInterval the frequency for clearing the internal stats. This protects against an ever growing + * cache + * @param durationToWait the amount of time to wait before logging a message after the threshold + * is reached + * @param threadPool a thread pool for running a scheduled task to clear the internal stats + */ + public Throttler(TimeValue resetInterval, TimeValue durationToWait, ThreadPool threadPool) { + this(resetInterval, durationToWait, Clock.systemUTC(), threadPool, new ConcurrentHashMap<>()); + } + + /** + * This should only be used directly for testing. 
+ */ + Throttler( + TimeValue resetInterval, + TimeValue durationToWait, + Clock clock, + ThreadPool threadPool, + ConcurrentMap logExecutors + ) { + Objects.requireNonNull(durationToWait); + Objects.requireNonNull(threadPool); + + this.resetInterval = Objects.requireNonNull(resetInterval); + this.durationToWait = Duration.ofMillis(durationToWait.millis()); + this.clock = Objects.requireNonNull(clock); + this.logExecutors = Objects.requireNonNull(logExecutors); + + this.cancellableTask.set(startResetTask(threadPool)); + } + + private Scheduler.Cancellable startResetTask(ThreadPool threadPool) { + classLogger.debug(() -> format("Reset task scheduled with interval [%s]", resetInterval)); + + return threadPool.scheduleWithFixedDelay(logExecutors::clear, resetInterval, threadPool.executor(UTILITY_THREAD_POOL_NAME)); + } + + public void setDurationToWait(TimeValue durationToWait) { + this.durationToWait = Duration.ofMillis(durationToWait.millis()); + } + + public void warn(Logger logger, String message, Throwable e) { + Objects.requireNonNull(message); + Objects.requireNonNull(e); + + if (isRunning.get()) { + logHelper(message, msgToAppend -> logger.warn(message.concat(msgToAppend), e)); + } + } + + private void logHelper(String message, Consumer executor) { + LogExecutor logExecutor = logExecutors.compute(message, (key, value) -> { + if (value == null) { + return new LogExecutor(clock, executor); + } + + return value.compute(executor, durationToWait); + }); + + logExecutor.log(); + } + + @Override + public void close() { + isRunning.set(false); + cancellableTask.get().cancel(); + logExecutors.clear(); + } + + private static class LogExecutor { + private final long skippedLogCalls; + private final Instant timeOfLastLogCall; + private final Clock clock; + private final Runnable logRunner; + + LogExecutor(Clock clock, Consumer logAppendedMessage) { + skippedLogCalls = 0; + timeOfLastLogCall = Instant.now(clock); + this.clock = clock; + // The first log message can log the 
original message without waiting + this.logRunner = () -> logAppendedMessage.accept(""); + } + + LogExecutor(Clock clock, long skippedLogCalls, Runnable logRunner) { + this.skippedLogCalls = skippedLogCalls; + timeOfLastLogCall = Instant.now(clock); + this.clock = clock; + this.logRunner = logRunner; + } + + void log() { + this.logRunner.run(); + } + + LogExecutor compute(Consumer executor, Duration durationToWait) { + if (hasDurationExpired(durationToWait)) { + String msg = ""; + if (this.skippedLogCalls == 1) { + msg = ", repeated 1 time"; + } else if (this.skippedLogCalls > 1) { + msg = format(", repeated %s times", this.skippedLogCalls); + } + + String finalMsg = msg; + return new LogExecutor(this.clock, 0, () -> executor.accept(finalMsg)); + } + + return new LogExecutor(this.clock, this.skippedLogCalls + 1, () -> {}); + } + + private boolean hasDurationExpired(Duration durationToWait) { + Instant now = Instant.now(clock); + return now.isAfter(timeOfLastLogCall.plus(durationToWait)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java new file mode 100644 index 0000000000000..6c38c341a0401 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.logging; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.Closeable; +import java.util.List; +import java.util.Objects; + +/** + * This class manages the settings for a {@link Throttler}. + */ +public class ThrottlerManager implements Closeable { + private static final TimeValue DEFAULT_STATS_RESET_INTERVAL_TIME = TimeValue.timeValueDays(1); + /** + * A setting specifying the interval for clearing the cached log message stats + */ + public static final Setting STATS_RESET_INTERVAL_SETTING = Setting.timeSetting( + "xpack.inference.logging.reset_interval", + DEFAULT_STATS_RESET_INTERVAL_TIME, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private static final TimeValue DEFAULT_WAIT_DURATION_TIME = TimeValue.timeValueHours(1); + /** + * A setting specifying the amount of time to wait after a log call occurs before allowing another log call. 
+ */ + public static final Setting LOGGER_WAIT_DURATION_SETTING = Setting.timeSetting( + "xpack.inference.logging.wait_duration", + DEFAULT_WAIT_DURATION_TIME, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private final ThreadPool threadPool; + private Throttler throttler; + private LoggerSettings loggerSettings; + + public ThrottlerManager(Settings settings, ThreadPool threadPool, ClusterService clusterService) { + Objects.requireNonNull(settings); + Objects.requireNonNull(clusterService); + + this.threadPool = Objects.requireNonNull(threadPool); + this.loggerSettings = LoggerSettings.fromSettings(settings); + + throttler = new Throttler(loggerSettings.resetInterval(), loggerSettings.waitDuration(), threadPool); + this.addSettingsUpdateConsumers(clusterService); + } + + private void addSettingsUpdateConsumers(ClusterService clusterService) { + clusterService.getClusterSettings().addSettingsUpdateConsumer(STATS_RESET_INTERVAL_SETTING, this::setResetInterval); + clusterService.getClusterSettings().addSettingsUpdateConsumer(LOGGER_WAIT_DURATION_SETTING, this::setWaitDuration); + } + + // default for testing + void setWaitDuration(TimeValue waitDuration) { + loggerSettings = loggerSettings.createWithWaitDuration(waitDuration); + + throttler.setDurationToWait(waitDuration); + } + + // default for testing + void setResetInterval(TimeValue resetInterval) { + loggerSettings = loggerSettings.createWithResetInterval(resetInterval); + + throttler.close(); + throttler = new Throttler(loggerSettings.resetInterval(), loggerSettings.waitDuration(), threadPool); + } + + public Throttler getThrottler() { + return throttler; + } + + @Override + public void close() { + throttler.close(); + } + + public static List> getSettings() { + return List.of(STATS_RESET_INTERVAL_SETTING, LOGGER_WAIT_DURATION_SETTING); + } + + private record LoggerSettings(TimeValue resetInterval, TimeValue waitDuration) { + LoggerSettings { + Objects.requireNonNull(resetInterval); + 
Objects.requireNonNull(waitDuration); + } + + static LoggerSettings fromSettings(Settings settings) { + return new LoggerSettings(STATS_RESET_INTERVAL_SETTING.get(settings), LOGGER_WAIT_DURATION_SETTING.get(settings)); + } + + LoggerSettings createWithResetInterval(TimeValue resetInterval) { + return new LoggerSettings(resetInterval, waitDuration); + } + + LoggerSettings createWithWaitDuration(TimeValue waitDuration) { + return new LoggerSettings(resetInterval, waitDuration); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java index 31228b645cff2..0849e8fa53cf5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java @@ -61,7 +61,7 @@ public static void throwIfNotEmptyMap(Map settingsMap, String se } public static ElasticsearchStatusException unknownSettingsError(Map config, String serviceName) { - // TOOD map as JSON + // TODO map as JSON return new ElasticsearchStatusException( "Model configuration contains settings [{}] unknown to the [{}] service", RestStatus.BAD_REQUEST, @@ -73,4 +73,12 @@ public static ElasticsearchStatusException unknownSettingsError(Map map) { + ValidationException validationException = new ValidationException(); + + String apiToken = MapParsingUtils.removeAsType(map, API_KEY, String.class); + + if (apiToken == null) { + validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(API_KEY, ModelSecrets.SECRET_SETTINGS)); + } else if (apiToken.isEmpty()) { + validationException.addValidationError(MapParsingUtils.mustBeNonEmptyString(API_KEY, ModelSecrets.SECRET_SETTINGS)); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + 
SecureString secureApiToken = new SecureString(Objects.requireNonNull(apiToken).toCharArray()); + + return new HuggingFaceElserSecretSettings(secureApiToken); + } + + public HuggingFaceElserSecretSettings { + Objects.requireNonNull(apiKey); + } + + public HuggingFaceElserSecretSettings(StreamInput in) throws IOException { + this(in.readSecureString()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(API_KEY, apiKey.toString()); + builder.endObject(); + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeSecureString(apiKey); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java new file mode 100644 index 0000000000000..e25315b6bbaf0 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface.elser; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceElserAction; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.MapParsingUtils.throwIfNotEmptyMap; + +public class HuggingFaceElserService implements InferenceService { + public static final String NAME = "hugging_face_elser"; + + private final SetOnce factory; + private final SetOnce throttlerManager; + private final AtomicReference sender = new AtomicReference<>(); + // This is initialized once which assumes that the settings will not change. 
To change the service, it + // should be deleted and then added again + private final AtomicReference action = new AtomicReference<>(); + + public HuggingFaceElserService(SetOnce factory, SetOnce throttlerManager) { + this.factory = Objects.requireNonNull(factory); + this.throttlerManager = Objects.requireNonNull(throttlerManager); + } + + @Override + public String name() { + return NAME; + } + + @Override + public HuggingFaceElserModel parseRequestConfig( + String modelId, + TaskType taskType, + Map config, + Set platformArchitectures + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + + HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap); + HuggingFaceElserSecretSettings secretSettings = HuggingFaceElserSecretSettings.fromMap(serviceSettingsMap); + + throwIfNotEmptyMap(config, NAME); + throwIfNotEmptyMap(serviceSettingsMap, NAME); + + return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); + } + + @Override + public HuggingFaceElserModel parsePersistedConfig( + String modelId, + TaskType taskType, + Map config, + Map secrets + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); + + HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap); + HuggingFaceElserSecretSettings secretSettings = HuggingFaceElserSecretSettings.fromMap(secretSettingsMap); + + return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); + } + + @Override + public void infer(Model model, String input, Map taskSettings, ActionListener listener) { + if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) { + listener.onFailure( + new ElasticsearchStatusException( + 
TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME), + RestStatus.BAD_REQUEST + ) + ); + return; + } + + try { + init(model); + } catch (Exception e) { + listener.onFailure(new ElasticsearchException("Failed to initialize service", e)); + return; + } + + action.get().execute(input, listener); + } + + @Override + public void start(Model model, ActionListener listener) { + try { + init(model); + sender.get().start(); + listener.onResponse(true); + } catch (Exception e) { + listener.onFailure(new ElasticsearchException("Failed to start service", e)); + } + } + + @Override + public void close() throws IOException { + IOUtils.closeWhileHandlingException(sender.get()); + } + + private void init(Model model) { + if (model instanceof HuggingFaceElserModel == false) { + throw new IllegalArgumentException("The internal model was invalid"); + } + + sender.updateAndGet(current -> Objects.requireNonNullElseGet(current, () -> factory.get().createSender(name()))); + + HuggingFaceElserModel huggingFaceElserModel = (HuggingFaceElserModel) model; + action.updateAndGet( + current -> Objects.requireNonNullElseGet( + current, + () -> new HuggingFaceElserAction(sender.get(), huggingFaceElserModel, throttlerManager.get()) + ) + ); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java new file mode 100644 index 0000000000000..13f66562f6f83 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface.elser; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.MapParsingUtils; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.core.Strings.format; + +public record HuggingFaceElserServiceSettings(URI uri) implements ServiceSettings { + public static final String NAME = "hugging_face_elser_service_settings"; + + private static final Logger logger = LogManager.getLogger(HuggingFaceElserServiceSettings.class); + static final String URL = "url"; + + public static HuggingFaceElserServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String parsedUrl = MapParsingUtils.removeAsType(map, URL, String.class); + URI uri = convertToUri(parsedUrl, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new HuggingFaceElserServiceSettings(uri); + } + + private static URI convertToUri(String url, ValidationException validationException) { + if (url == null) { + validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(URL, 
ModelConfigurations.SERVICE_SETTINGS)); + return null; + } + + try { + return createUri(url); + } catch (IllegalArgumentException ignored) { + validationException.addValidationError(MapParsingUtils.invalidUrlErrorMsg(url, ModelConfigurations.SERVICE_SETTINGS)); + return null; + } + } + + // TODO move this to a common location and potentially improve parsing errors + private static URI createUri(String url) throws IllegalArgumentException { + Objects.requireNonNull(url); + + try { + return new URI(url); + } catch (URISyntaxException e) { + logger.info(format("Invalid URL received [%s]", url), e); + throw new IllegalArgumentException(format("unable to parse url [%s]", url), e); + } + } + + public HuggingFaceElserServiceSettings { + Objects.requireNonNull(uri); + } + + public HuggingFaceElserServiceSettings(String url) { + this(createUri(url)); + } + + public HuggingFaceElserServiceSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(URL, uri.toString()); + builder.endObject(); + + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uri.toString()); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java new file mode 100644 index 0000000000000..5a51e89f57e11 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.EmptyTaskSettings; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +public class EmptyTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static EmptyTaskSettings createRandom() { + return EmptyTaskSettings.INSTANCE; // no options to randomise + } + + @Override + protected Writeable.Reader instanceReader() { + return EmptyTaskSettings::new; + } + + @Override + protected EmptyTaskSettings createTestInstance() { + return createRandom(); + } + + @Override + protected EmptyTaskSettings mutateInstance(EmptyTaskSettings instance) { + return null; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java new file mode 100644 index 0000000000000..713312204e65b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.hamcrest.Matchers.equalTo; +import static 
org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class HuggingFaceElserActionTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(getTestName()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testExecute_ReturnsSuccessfulResponse() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + [ + { + ".": 0.133155956864357 + } + ] + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var action = createAction(getUrl(webServer), sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute("abc", listener); + + InferenceResults result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(Map.of(DEFAULT_RESULTS_FIELD, Map.of(".", 0.13315596f)))); + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var 
requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), is("abc")); + } + } + + public void testExecute_ThrowsURISyntaxException_ForInvalidUrl() throws IOException { + try (var sender = mock(Sender.class)) { + var thrownException = expectThrows(IllegalArgumentException.class, () -> createAction("^^", sender)); + assertThat(thrownException.getMessage(), is("unable to parse url [^^]")); + } + } + + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); + + var action = createAction(getUrl(webServer), sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute("abc", listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any()); + + var action = createAction(getUrl(webServer), sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute("abc", listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("Failed to send request ELSER Hugging Face request")); + } + + private HuggingFaceElserAction createAction(String url, Sender sender) { + var model = new HuggingFaceElserModel( + "id", + TaskType.SPARSE_EMBEDDING, + "service", + new HuggingFaceElserServiceSettings(url), + new HuggingFaceElserSecretSettings(new SecureString("secret".toCharArray())) + ); + + return new HuggingFaceElserAction(sender, model, mock(ThrottlerManager.class)); + } +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java index dd9a89ae41881..3e07bd773c65e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; import org.junit.Before; @@ -25,7 +26,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterService; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.hamcrest.Matchers.equalTo; @@ -63,7 +64,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { String paramValue = randomAlphaOfLength(3); var httpPost = createHttpPost(webServer.getPort(), paramKey, paramValue); - var manager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty()); + var manager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); try (var httpClient = manager.getHttpClient()) { httpClient.start(); @@ -83,7 +84,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { public void testStartsANewEvictor_WithNewEvictionInterval() { 
var threadPool = mock(ThreadPool.class); - var manager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty()); + var manager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); var evictionInterval = TimeValue.timeValueSeconds(1); manager.setEvictionInterval(evictionInterval); @@ -96,7 +97,13 @@ public void test_DoesNotStartANewEvictor_WithNewEvictionMaxIdle() { Settings settings = Settings.builder() .put(HttpClientManager.CONNECTION_EVICTION_THREAD_INTERVAL_SETTING.getKey(), TimeValue.timeValueNanos(1)) .build(); - var manager = new HttpClientManager(settings, mockConnectionManager, threadPool, mockClusterService(settings)); + var manager = new HttpClientManager( + settings, + mockConnectionManager, + threadPool, + mockClusterService(settings), + mock(ThrottlerManager.class) + ); var evictionMaxIdle = TimeValue.timeValueSeconds(1); manager.setEvictionMaxIdle(evictionMaxIdle); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java index e2c1b1f942f8f..c72d9167a9e06 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java @@ -29,8 +29,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.threadpool.ScalingExecutorBuilder; -import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.junit.After; @@ -44,8 +42,9 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; -import static 
org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterService; +import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -82,7 +81,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { String paramValue = randomAlphaOfLength(3); var httpPost = createHttpPost(webServer.getPort(), paramKey, paramValue); - try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager())) { + try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager(), mockThrottlerManager())) { httpClient.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -100,7 +99,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { } public void testSend_ThrowsErrorIfCalledBeforeStart() throws Exception { - try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager())) { + try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager(), mockThrottlerManager())) { PlainActionFuture listener = new PlainActionFuture<>(); var thrownException = expectThrows( AssertionError.class, @@ -123,7 +122,7 @@ public void testSend_FailedCallsOnFailure() throws Exception { var httpPost = createHttpPost(webServer.getPort(), "a", "b"); - try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool)) { + try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool, mockThrottlerManager())) { client.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -146,7 +145,7 @@ public void testSend_CancelledCallsOnFailure() throws Exception 
{ var httpPost = createHttpPost(webServer.getPort(), "a", "b"); - try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool)) { + try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool, mockThrottlerManager())) { client.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -164,7 +163,7 @@ public void testStart_MultipleCallsOnlyStartTheClientOnce() throws Exception { var httpPost = createHttpPost(webServer.getPort(), "a", "b"); - try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool)) { + try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool, mockThrottlerManager())) { client.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -187,7 +186,7 @@ public void testSend_FailsWhenMaxBytesReadIsExceeded() throws Exception { Settings settings = Settings.builder().put(HttpSettings.MAX_HTTP_RESPONSE_SIZE.getKey(), ByteSizeValue.ONE).build(); var httpSettings = createHttpSettings(settings); - try (var httpClient = HttpClient.create(httpSettings, threadPool, createConnectionManager())) { + try (var httpClient = HttpClient.create(httpSettings, threadPool, createConnectionManager(), mockThrottlerManager())) { httpClient.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -218,20 +217,6 @@ public static HttpPost createHttpPost(int port, String paramKey, String paramVal return httpPost; } - public static ThreadPool createThreadPool(String name) { - return new TestThreadPool( - name, - new ScalingExecutorBuilder( - UTILITY_THREAD_POOL_NAME, - 1, - 4, - TimeValue.timeValueMinutes(10), - false, - "xpack.inference.utility_thread_pool" - ) - ); - } - public static PoolingNHttpClientConnectionManager createConnectionManager() throws IOReactorException { return new PoolingNHttpClientConnectionManager(new DefaultConnectingIOReactor()); } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java index 0f03003589073..dba80923c487d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java @@ -20,7 +20,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.doAnswer; @@ -49,16 +49,18 @@ public void testStart_CallsExecutorSubmit() throws IOReactorException { when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); - var evictor = new IdleConnectionEvictor( - mockThreadPool, - createConnectionManager(), - new TimeValue(1, TimeUnit.NANOSECONDS), - new TimeValue(1, TimeUnit.NANOSECONDS) - ); - - evictor.start(); - - verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any()); + try ( + var evictor = new IdleConnectionEvictor( + mockThreadPool, + createConnectionManager(), + new TimeValue(1, TimeUnit.NANOSECONDS), + new TimeValue(1, TimeUnit.NANOSECONDS) + ) + ) { + evictor.start(); + + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any()); + } } public void testStart_OnlyCallsSubmitOnce() throws IOReactorException { @@ -66,17 +68,19 @@ public void testStart_OnlyCallsSubmitOnce() throws IOReactorException { 
when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); - var evictor = new IdleConnectionEvictor( - mockThreadPool, - createConnectionManager(), - new TimeValue(1, TimeUnit.NANOSECONDS), - new TimeValue(1, TimeUnit.NANOSECONDS) - ); - - evictor.start(); - evictor.start(); - - verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any()); + try ( + var evictor = new IdleConnectionEvictor( + mockThreadPool, + createConnectionManager(), + new TimeValue(1, TimeUnit.NANOSECONDS), + new TimeValue(1, TimeUnit.NANOSECONDS) + ) + ) { + evictor.start(); + evictor.start(); + + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any()); + } } public void testCloseExpiredConnections_IsCalled() throws InterruptedException { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java index 80a8c4d4914c3..becb0cc43e1e8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java @@ -10,12 +10,30 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ScalingExecutorBuilder; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.DeprecationHandler; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import 
org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.Collection; +import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -30,7 +48,8 @@ public static ClusterService mockClusterService(Settings settings) { var registeredSettings = Stream.of( HttpSettings.getSettings(), HttpClientManager.getSettings(), - HttpRequestSenderFactory.HttpRequestSender.getSettings() + HttpRequestSenderFactory.HttpRequestSender.getSettings(), + ThrottlerManager.getSettings() ).flatMap(Collection::stream).collect(Collectors.toSet()); var cSettings = new ClusterSettings(settings, registeredSettings); @@ -38,4 +57,41 @@ public static ClusterService mockClusterService(Settings settings) { return clusterService; } + + public static String getUrl(MockWebServer webServer) { + return format("http://%s:%s", webServer.getHostName(), webServer.getPort()); + } + + public static Map entityAsMap(String body) throws IOException { + InputStream bodyStream = new ByteArrayInputStream(body.getBytes(StandardCharsets.UTF_8)); + + return entityAsMap(bodyStream); + } + + public static Map entityAsMap(InputStream body) throws IOException { + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + XContentParserConfiguration.EMPTY.withRegistry(NamedXContentRegistry.EMPTY) + .withDeprecationHandler(DeprecationHandler.THROW_UNSUPPORTED_OPERATION), + body + ) + ) { + return parser.map(); + } + } + + public static ThreadPool 
createThreadPool(String name) { + return new TestThreadPool( + name, + new ScalingExecutorBuilder( + UTILITY_THREAD_POOL_NAME, + 1, + 4, + TimeValue.timeValueMinutes(10), + false, + "xpack.inference.utility_thread_pool" + ) + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java index 85f30c2aed05f..245ce09848a7f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java @@ -29,7 +29,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java index e2d78324a3c93..3434b951147d7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java @@ -17,11 +17,13 @@ import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; import org.junit.Before; @@ -29,10 +31,11 @@ import java.nio.charset.StandardCharsets; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -48,20 +51,20 @@ public class HttpRequestSenderFactoryTests extends ESTestCase { private final MockWebServer webServer = new MockWebServer(); private ThreadPool threadPool; private HttpClientManager clientManager; - private Thread thread; + private final AtomicReference threadRef = new AtomicReference<>(); @Before public void init() throws Exception { webServer.start(); threadPool = createThreadPool(getTestName()); - clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty()); - thread = null; + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + threadRef.set(null); } @After public void shutdown() 
throws IOException, InterruptedException { - if (thread != null) { - thread.join(TIMEOUT.millis()); + if (threadRef.get() != null) { + threadRef.get().join(TIMEOUT.millis()); } clientManager.close(); @@ -70,20 +73,7 @@ public void shutdown() throws IOException, InterruptedException { } public void testCreateSender_SendsRequestAndReceivesResponse() throws Exception { - var mockExecutorService = mock(ExecutorService.class); - doAnswer(invocation -> { - Runnable runnable = (Runnable) invocation.getArguments()[0]; - thread = new Thread(runnable); - thread.start(); - - return Void.TYPE; - }).when(mockExecutorService).execute(any(Runnable.class)); - - var mockThreadPool = mock(ThreadPool.class); - when(mockThreadPool.executor(anyString())).thenReturn(mockExecutorService); - when(mockThreadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); - - var senderFactory = new HttpRequestSenderFactory(mockThreadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + var senderFactory = createSenderFactory(clientManager, threadRef); try (var sender = senderFactory.createSender("test_service")) { sender.start(); @@ -162,4 +152,22 @@ public void testHttpRequestSenderWithTimeout_Throws_WhenATimeoutOccurs() throws ); } } + + private static HttpRequestSenderFactory createSenderFactory(HttpClientManager clientManager, AtomicReference threadRef) { + var mockExecutorService = mock(ExecutorService.class); + doAnswer(invocation -> { + Runnable runnable = (Runnable) invocation.getArguments()[0]; + threadRef.set(new Thread(runnable)); + threadRef.get().start(); + + return Void.TYPE; + }).when(mockExecutorService).execute(any(Runnable.class)); + + var mockThreadPool = mock(ThreadPool.class); + when(mockThreadPool.executor(anyString())).thenReturn(mockExecutorService); + when(mockThreadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + when(mockThreadPool.schedule(any(Runnable.class), any(), 
any())).thenReturn(mock(Scheduler.ScheduledCancellable.class)); + + return new HttpRequestSenderFactory(mockThreadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index 811881bb10c15..f3718954d8ad9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -38,8 +39,8 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createConnectionManager; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.emptyHttpSettings; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -80,7 +81,7 @@ public void testDoRun_SendsRequestAndReceivesResponse() throws Exception { String paramValue = randomAlphaOfLength(3); var httpPost = createHttpPost(webServer.getPort(), paramKey, paramValue); - try (var httpClient = 
HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager())) { + try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager(), mock(ThrottlerManager.class))) { httpClient.start(); PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java new file mode 100644 index 0000000000000..3463067143994 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestTests.createRequest; +import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static 
org.mockito.Mockito.mock; + +public class HuggingFaceClientTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(getTestName()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mockThrottlerManager()); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testSend_SuccessfulResponse() throws IOException, URISyntaxException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + [ + { + ".": 0.133155956864357 + } + ] + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + HuggingFaceClient huggingFaceClient = new HuggingFaceClient(sender, mockThrottlerManager()); + + PlainActionFuture listener = new PlainActionFuture<>(); + huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + + InferenceResults result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(Map.of(DEFAULT_RESULTS_FIELD, Map.of(".", 0.13315596f)))); + + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + 
assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), is("abc")); + } + } + + public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyntaxException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + [ + { + "outputs": [ + [ + [ + ".", + ".", + 0.133155956864357 + ] + ] + ] + } + ] + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + HuggingFaceClient huggingFaceClient = new HuggingFaceClient(sender, mockThrottlerManager()); + + PlainActionFuture listener = new PlainActionFuture<>(); + huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to parse the Hugging Face ELSER response for request [POST %s HTTP/1.1]", getUrl(webServer))) + ); + + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), is("abc")); + } + } + + public void testSend_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); + + HuggingFaceClient huggingFaceClient = new HuggingFaceClient(sender, mockThrottlerManager()); + PlainActionFuture listener = new PlainActionFuture<>(); + + var thrownException 
= expectThrows( + ElasticsearchException.class, + () -> huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener) + ); + assertThat(thrownException.getMessage(), is("failed")); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java new file mode 100644 index 0000000000000..b0977da234c18 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.huggingface; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.is; + +public class HuggingFaceElserRequestEntityTests extends ESTestCase { + + public void testXContent() throws IOException { + var entity = new HuggingFaceElserRequestEntity("abc"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + { + "inputs" : "abc" + }""")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java new file mode 100644 index 0000000000000..717f5a7e2409d --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.huggingface; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class HuggingFaceElserRequestTests extends ESTestCase { + public void testCreateRequest() throws URISyntaxException, IOException { + var huggingFaceRequest = createRequest("www.google.com", "secret", "abc"); + var httpRequest = huggingFaceRequest.createRequest(); + + assertThat(httpRequest, instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest; + + assertThat(httpPost.getURI().toString(), is("www.google.com")); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + + var requestMap = 
entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), is("abc")); + } + + public static HuggingFaceElserRequest createRequest(String url, String apiKey, String input) throws URISyntaxException { + var account = new HuggingFaceAccount(new URI(url), new SecureString(apiKey.toCharArray())); + var entity = new HuggingFaceElserRequestEntity(input); + + return new HuggingFaceElserRequest(account, entity); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java new file mode 100644 index 0000000000000..8cfac1858ab50 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java @@ -0,0 +1,220 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentEOFException; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class HuggingFaceElserResponseEntityTests extends ESTestCase { + public void testFromResponse_CreatesTextExpansionResults() throws IOException { + String responseJson = """ + [ + { + ".": 0.133155956864357, + "the": 0.6747211217880249 + } + ]"""; + + TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + Map tokenWeightMap = parsedResults.getWeightedTokens() + .stream() + .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); + + // the results get truncated because weighted token stores them as a float + assertThat(tokenWeightMap.size(), is(2)); + assertThat(tokenWeightMap.get("."), is(0.13315596f)); + assertThat(tokenWeightMap.get("the"), is(0.67472112f)); + assertFalse(parsedResults.isTruncated()); + } + + public void testFromResponse_CreatesTextExpansionResultsForFirstItem() throws IOException { + String responseJson = """ + [ + { + ".": 0.133155956864357, + "the": 0.6747211217880249 + }, + { + "hi": 0.133155956864357, + "super": 0.6747211217880249 + } + ]"""; + + TextExpansionResults parsedResults = 
HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + Map tokenWeightMap = parsedResults.getWeightedTokens() + .stream() + .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); + + // the results get truncated because weighted token stores them as a float + assertThat(tokenWeightMap.size(), is(2)); + assertThat(tokenWeightMap.get("."), is(0.13315596f)); + assertThat(tokenWeightMap.get("the"), is(0.67472112f)); + assertFalse(parsedResults.isTruncated()); + } + + public void testFails_NotAnArray() { + String responseJson = """ + { + "field": "abc" + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_ARRAY] but found [START_OBJECT]") + ); + } + + public void testFails_ValueString() { + String responseJson = """ + [ + { + "field": "abc" + } + ] + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]") + ); + } + + public void testFails_ValueInt() throws IOException { + String responseJson = """ + [ + { + "field": 1 + } + ] + """; + + TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + Map tokenWeightMap = parsedResults.getWeightedTokens() + .stream() + 
.collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); + + assertThat(tokenWeightMap.size(), is(1)); + assertThat(tokenWeightMap.get("field"), is(1.0f)); + assertFalse(parsedResults.isTruncated()); + } + + public void testFails_ValueLong() throws IOException { + String responseJson = """ + [ + { + "field": 40294967295 + } + ] + """; + + TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + Map tokenWeightMap = parsedResults.getWeightedTokens() + .stream() + .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); + + assertThat(tokenWeightMap.size(), is(1)); + assertThat(tokenWeightMap.get("field"), is(4.0294965E10F)); + assertFalse(parsedResults.isTruncated()); + } + + public void testFails_ValueObject() { + String responseJson = """ + [ + { + "field": {} + } + ] + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_OBJECT]") + ); + } + + public void testFails_ResponseIsInvalidJson_MissingSquareBracket() { + String responseJson = """ + [ + { + "field": 0.1 + } + """; + + var thrownException = expectThrows( + XContentEOFException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), containsString("expected close marker for Array (start marker at [Source: (byte[])")); + } + + public void testFails_ResponseIsInvalidJson_MissingField() { + String responseJson = """ + [ + { + : 0.1 + 
} + ] + """; + + var thrownException = expectThrows( + XContentParseException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), containsString("Unexpected character")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java new file mode 100644 index 0000000000000..01374d02a21c3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.logging; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class ThrottlerManagerTests extends ESTestCase { + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(30); + + private ThreadPool threadPool; + + @Before + public void init() { + threadPool = createThreadPool(getTestName()); + } + + @After + public void shutdown() { + terminate(threadPool); + } + + public void testStartsNewThrottler_WhenResetIntervalIsChanged() { + var mockThreadPool = mock(ThreadPool.class); + when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); + + try (var manager = new ThrottlerManager(Settings.EMPTY, mockThreadPool, mockClusterServiceEmpty())) { + var resetInterval = TimeValue.timeValueSeconds(1); + var currentThrottler = manager.getThrottler(); + manager.setResetInterval(resetInterval); + // once for when the throttler is created initially + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), eq(TimeValue.timeValueDays(1)), any()); + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), eq(resetInterval), any()); + assertNotSame(currentThrottler, manager.getThrottler()); + } + } + + public void 
testDoesNotStartNewThrottler_WhenWaitDurationIsChanged() { + var mockThreadPool = mock(ThreadPool.class); + when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); + + try (var manager = new ThrottlerManager(Settings.EMPTY, mockThreadPool, mockClusterServiceEmpty())) { + var currentThrottler = manager.getThrottler(); + + var waitDuration = TimeValue.timeValueSeconds(1); + manager.setWaitDuration(waitDuration); + // should only call when initializing the throttler + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), eq(TimeValue.timeValueDays(1)), any()); + assertSame(currentThrottler, manager.getThrottler()); + } + } + + public static ThrottlerManager mockThrottlerManager() { + var mockManager = mock(ThrottlerManager.class); + when(mockManager.getThrottler()).thenReturn(mock(Throttler.class)); + + return mockManager; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java new file mode 100644 index 0000000000000..df95232ff85f7 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java @@ -0,0 +1,229 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.logging; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; +import java.time.ZoneId; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class ThrottlerTests extends ESTestCase { + + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(30); + + private ThreadPool threadPool; + + @Before + public void init() { + threadPool = createThreadPool(getTestName()); + } + + @After + public void shutdown() { + terminate(threadPool); + } + + public void testWarn_LogsOnlyOnce() { + var logger = mock(Logger.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + Clock.fixed(Instant.now(), ZoneId.systemDefault()), + threadPool, + new ConcurrentHashMap<>() + ) + ) { + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verifyNoMoreInteractions(logger); + } + } + + public void testWarn_LogsOnce_ThenOnceAfterDuration() { + var logger = mock(Logger.class); + + var now = Clock.systemUTC().instant(); + + 
var clock = mock(Clock.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ) + ) { + when(clock.instant()).thenReturn(now); + + // The first call is always logged + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now.plus(Duration.ofMinutes(1))); + // This call should be allowed because the clock thinks it's after the duration period + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(2)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now); + // This call should not be allowed because the clock doesn't think it's passed the wait period + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verifyNoMoreInteractions(logger); + } + } + + public void testWarn_AllowsDifferentMessagesToBeLogged() { + var logger = mock(Logger.class); + + var clock = mock(Clock.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ) + ) { + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + throttled.warn(logger, "a different message", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("a different message"), any(Throwable.class)); + } + } + + public void testWarn_LogsRepeated1Time() { + var logger = mock(Logger.class); + + var now = Clock.systemUTC().instant(); + + var clock = mock(Clock.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ) + ) { + when(clock.instant()).thenReturn(now); + // first message is 
allowed + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now); // don't allow this message because duration hasn't expired + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now.plus(Duration.ofMinutes(1))); // allow this message by faking expired duration + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test, repeated 1 time"), any(Throwable.class)); + } + } + + public void testWarn_LogsRepeated2Times() { + var logger = mock(Logger.class); + + var now = Clock.systemUTC().instant(); + + var clock = mock(Clock.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ) + ) { + when(clock.instant()).thenReturn(now); + // message allowed because it is the first one + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now); // don't allow these messages because duration hasn't expired + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now.plus(Duration.ofMinutes(1))); // allow this message by faking the duration completion + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test, repeated 2 times"), any(Throwable.class)); + } + } + + public void testResetTask_ClearsInternalsAfterInterval() throws InterruptedException { + var calledClearLatch = new CountDownLatch(1); + + var 
now = Clock.systemUTC().instant(); + + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var concurrentMap = mock(ConcurrentHashMap.class); + doAnswer(invocation -> { + calledClearLatch.countDown(); + + return Void.TYPE; + }).when(concurrentMap).clear(); + + try (@SuppressWarnings("unchecked") + var ignored = new Throttler(TimeValue.timeValueNanos(1), TimeValue.timeValueSeconds(10), clock, threadPool, concurrentMap)) { + calledClearLatch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); + } + } + + public void testClose_DoesNotAllowLoggingAnyMore() { + var logger = mock(Logger.class); + + var clock = mock(Clock.class); + + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ); + + throttled.close(); + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verifyNoMoreInteractions(logger); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java new file mode 100644 index 0000000000000..c3aa628705195 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface.elser; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class HuggingFaceElserSecretSettingsTests extends AbstractWireSerializingTestCase { + + public static HuggingFaceElserSecretSettings createRandom() { + return new HuggingFaceElserSecretSettings(new SecureString(randomAlphaOfLength(15).toCharArray())); + } + + public void testFromMap() { + var apiKey = "abc"; + var serviceSettings = HuggingFaceElserSecretSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserSecretSettings.API_KEY, apiKey))); + + assertThat(new HuggingFaceElserSecretSettings(new SecureString(apiKey.toCharArray())), is(serviceSettings)); + } + + public void testFromMap_MissingApiKey_ThrowsError() { + var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceElserSecretSettings.fromMap(new HashMap<>())); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format("[secret_settings] does not contain the required setting [%s]", HuggingFaceElserSecretSettings.API_KEY) + ) + ); + } + + public void testFromMap_EmptyApiKey_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceElserSecretSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserSecretSettings.API_KEY, ""))) + ); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "[secret_settings] Invalid value empty string. 
[%s] must be a non-empty string", + HuggingFaceElserSecretSettings.API_KEY + ) + ) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return HuggingFaceElserSecretSettings::new; + } + + @Override + protected HuggingFaceElserSecretSettings createTestInstance() { + return createRandom(); + } + + @Override + protected HuggingFaceElserSecretSettings mutateInstance(HuggingFaceElserSecretSettings instance) throws IOException { + return createRandom(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java new file mode 100644 index 0000000000000..021904d7c2b67 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface.elser; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class HuggingFaceElserServiceSettingsTests extends AbstractWireSerializingTestCase { + + public static HuggingFaceElserServiceSettings createRandom() { + return new HuggingFaceElserServiceSettings(randomAlphaOfLength(15)); + } + + public void testFromMap() { + var url = "https://www.abc.com"; + var serviceSettings = HuggingFaceElserServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, url))); + + assertThat(new HuggingFaceElserServiceSettings(url), is(serviceSettings)); + } + + public void testFromMap_MissingUrl_ThrowsError() { + var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceElserServiceSettings.fromMap(new HashMap<>())); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format("[service_settings] does not contain the required setting [%s]", HuggingFaceElserServiceSettings.URL) + ) + ); + } + + public void testFromMap_InvalidUrl_ThrowsError() { + var url = "https://www.abc^.com"; + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceElserServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, url))) + ); + + assertThat( + thrownException.getMessage(), + containsString(Strings.format("Invalid url [%s] received in setting [service_settings]", url)) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return HuggingFaceElserServiceSettings::new; + } + + @Override + protected HuggingFaceElserServiceSettings createTestInstance() { + 
return createRandom(); + } + + @Override + protected HuggingFaceElserServiceSettings mutateInstance(HuggingFaceElserServiceSettings instance) throws IOException { + return createRandom(); + } +} diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java index 97c17d18d7164..b5c35e758a65c 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java @@ -115,13 +115,12 @@ public static class Defaults { public static final EnumSet METRICS = EnumSet.noneOf(Metric.class); } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter> meta = Parameter.metaParam(); private final Parameter ignoreMalformed; - @SuppressWarnings("this-escape") private final Parameter> metrics = new Parameter<>(Names.METRICS, false, () -> Defaults.METRICS, (n, c, o) -> { @SuppressWarnings("unchecked") List metricsList = (List) o; diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 62b02f5a3d850..90c055f3e77bb 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -76,7 +76,7 @@ private static UnsignedLongFieldMapper toType(FieldMapper in) { return 
(UnsignedLongFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter indexed; private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); private final Parameter stored = Parameter.storeParam(m -> toType(m).stored, false); @@ -102,7 +102,6 @@ public Builder(String name, Settings settings, IndexMode mode) { this(name, IGNORE_MALFORMED_SETTING.get(settings), mode); } - @SuppressWarnings("this-escape") public Builder(String name, boolean ignoreMalformedByDefault, IndexMode mode) { super(name); this.ignoreMalformed = Parameter.explicitBoolParam( @@ -438,7 +437,7 @@ public CollapseType collapseType() { * null, if a value represents some other number * throws an exception if a value is wrongly formatted number */ - protected static Long parseTerm(Object value) { + static Long parseTerm(Object value) { if (value instanceof Number) { if ((value instanceof Long) || (value instanceof Integer) || (value instanceof Short) || (value instanceof Byte)) { long lv = ((Number) value).longValue(); @@ -472,7 +471,7 @@ protected static Long parseTerm(Object value) { * null, if value is higher than the maximum allowed value for unsigned long * throws an exception is value represents wrongly formatted number */ - protected static Long parseLowerRangeTerm(Object value, boolean include) { + static Long parseLowerRangeTerm(Object value, boolean include) { if ((value instanceof Long) || (value instanceof Integer) || (value instanceof Short) || (value instanceof Byte)) { long longValue = ((Number) value).longValue(); if (longValue < 0) return 0L; // limit lowerTerm to min value for unsigned long: 0 @@ -509,7 +508,7 @@ protected static Long parseLowerRangeTerm(Object value, boolean include) { * -1 (unsigned long of 18446744073709551615) for values greater than 18446744073709551615 * throws an exception is value represents wrongly formatted 
number */ - protected static Long parseUpperRangeTerm(Object value, boolean include) { + static Long parseUpperRangeTerm(Object value, boolean include) { if ((value instanceof Long) || (value instanceof Integer) || (value instanceof Short) || (value instanceof Byte)) { long longValue = ((Number) value).longValue(); if ((longValue < 0) || (longValue == 0 && include == false)) return null; // upperTerm is below minimum diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index 3c5b5c4243c5e..e52a60691d150 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -42,7 +42,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -public class MlInitializationService implements ClusterStateListener { +public final class MlInitializationService implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(MlInitializationService.class); @@ -85,7 +85,6 @@ public class MlInitializationService implements ClusterStateListener { } // For testing - @SuppressWarnings("this-escape") public MlInitializationService( Client client, ThreadPool threadPool, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java index 11b0633ee7209..18d974473251b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java @@ -30,7 +30,7 @@ import static 
org.elasticsearch.core.Strings.format; -public class MlAutoscalingDeciderService implements AutoscalingDeciderService, LocalNodeMasterListener { +public final class MlAutoscalingDeciderService implements AutoscalingDeciderService, LocalNodeMasterListener { private static final Logger logger = LogManager.getLogger(MlAutoscalingDeciderService.class); @@ -46,7 +46,6 @@ public class MlAutoscalingDeciderService implements AutoscalingDeciderService, L private volatile boolean isMaster; private volatile int allocatedProcessorsScale; - @SuppressWarnings("this-escape") public MlAutoscalingDeciderService( MlMemoryTracker memoryTracker, Settings settings, @@ -56,7 +55,6 @@ public MlAutoscalingDeciderService( this(new NodeLoadDetector(memoryTracker), settings, nodeAvailabilityZoneMapper, clusterService, System::currentTimeMillis); } - @SuppressWarnings("this-escape") MlAutoscalingDeciderService( NodeLoadDetector nodeLoadDetector, Settings settings, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java index 5c89c29a70cdd..4b925f678602a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java @@ -22,6 +22,8 @@ import org.elasticsearch.xpack.core.ml.autoscaling.MlAutoscalingStats; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.utils.MemoryTrackedTaskState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.NodeLoadDetector; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; @@ 
-130,22 +132,27 @@ static void getMemoryAndProcessors( autoscalingContext.modelAssignments.size() ); - // start with `minNodes = 1` if any ML job is started, further adjustments are made for trained models below - int minNodes = autoscalingContext.anomalyDetectionTasks.isEmpty() - && autoscalingContext.dataframeAnalyticsTasks.isEmpty() - && autoscalingContext.modelAssignments.isEmpty() ? 0 : 1; + // Start with `minNodes = 0`. If any ML job is started this will be increased to 1 in the loops below, + // and further adjustments are made for trained models depending on allocations. + int minNodes = 0; // anomaly detection for (var task : autoscalingContext.anomalyDetectionTasks) { + MemoryTrackedTaskState state = MlTasks.getMemoryTrackedTaskState(task); + if (state != null && state.consumesMemory() == false) { + continue; + } + String jobId = ((OpenJobAction.JobParams) task.getParams()).getJobId(); Long jobMemory = mlMemoryTracker.getAnomalyDetectorJobMemoryRequirement(jobId); - if (jobMemory == null) { logger.debug("could not find memory requirement for job [{}], returning no-scale", jobId); listener.onResponse(noScaleStats(numberMlNodes)); return; } + minNodes = 1; + if (AWAITING_LAZY_ASSIGNMENT.equals(task.getAssignment())) { logger.debug("job [{}] lacks assignment , memory required [{}]", jobId, jobMemory); @@ -165,15 +172,21 @@ static void getMemoryAndProcessors( // data frame analytics for (var task : autoscalingContext.dataframeAnalyticsTasks) { + MemoryTrackedTaskState state = MlTasks.getMemoryTrackedTaskState(task); + if (state != null && state.consumesMemory() == false) { + continue; + } + String jobId = MlTasks.dataFrameAnalyticsId(task.getId()); Long jobMemory = mlMemoryTracker.getDataFrameAnalyticsJobMemoryRequirement(jobId); - if (jobMemory == null) { logger.debug("could not find memory requirement for job [{}], returning no-scale", jobId); listener.onResponse(noScaleStats(numberMlNodes)); return; } + minNodes = 1; + if 
(AWAITING_LAZY_ASSIGNMENT.equals(task.getAssignment())) { logger.debug("dfa job [{}] lacks assignment , memory required [{}]", jobId, jobMemory); @@ -192,12 +205,12 @@ static void getMemoryAndProcessors( // trained models for (var modelAssignment : autoscalingContext.modelAssignments.entrySet()) { - final int numberOfAllocations = modelAssignment.getValue().getTaskParams().getNumberOfAllocations(); - final int numberOfThreadsPerAllocation = modelAssignment.getValue().getTaskParams().getThreadsPerAllocation(); - final long estimatedMemoryUsage = modelAssignment.getValue().getTaskParams().estimateMemoryUsageBytes(); + TrainedModelAssignment assignment = modelAssignment.getValue(); + final int numberOfAllocations = assignment.getTaskParams().getNumberOfAllocations(); + final int numberOfThreadsPerAllocation = assignment.getTaskParams().getThreadsPerAllocation(); + final long estimatedMemoryUsage = assignment.getTaskParams().estimateMemoryUsageBytes(); - if (AssignmentState.STARTING.equals(modelAssignment.getValue().getAssignmentState()) - && modelAssignment.getValue().getNodeRoutingTable().isEmpty()) { + if (AssignmentState.STARTING.equals(assignment.getAssignmentState()) && assignment.getNodeRoutingTable().isEmpty()) { logger.debug( () -> format( @@ -216,6 +229,9 @@ static void getMemoryAndProcessors( extraSingleNodeProcessors = Math.max(extraSingleNodeProcessors, numberOfThreadsPerAllocation); extraProcessors += numberOfAllocations * numberOfThreadsPerAllocation; } + } else if (assignment.getNodeRoutingTable().values().stream().allMatch(r -> r.getState().consumesMemory() == false)) { + // Ignore states that don't consume memory, for example all allocations are failed + continue; } else { logger.debug( () -> format( @@ -229,9 +245,6 @@ static void getMemoryAndProcessors( modelMemoryBytesSum += estimatedMemoryUsage; processorsSum += numberOfAllocations * numberOfThreadsPerAllocation; - // min(3, max(number of allocations over all deployed models) - minNodes = 
Math.min(3, Math.max(minNodes, numberOfAllocations)); - for (String node : modelAssignment.getValue().getNodeRoutingTable().keySet()) { perNodeModelMemoryInBytes.computeIfAbsent(node, k -> new ArrayList<>()) .add( @@ -244,6 +257,9 @@ static void getMemoryAndProcessors( ); } } + + // min(3, max(number of allocations over all deployed models)) + minNodes = Math.min(3, Math.max(minNodes, numberOfAllocations)); + } // check for downscaling diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeFakeAvailabilityZoneMapper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeFakeAvailabilityZoneMapper.java index 108b7eaff06e3..df2f66f6c5a42 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeFakeAvailabilityZoneMapper.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeFakeAvailabilityZoneMapper.java @@ -29,7 +29,6 @@ public class NodeFakeAvailabilityZoneMapper extends AbstractNodeAvailabilityZone private static final Logger logger = LogManager.getLogger(NodeFakeAvailabilityZoneMapper.class); - @SuppressWarnings("this-escape") public NodeFakeAvailabilityZoneMapper(Settings settings, ClusterSettings clusterSettings) { this(settings, clusterSettings, null); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeRealAvailabilityZoneMapper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeRealAvailabilityZoneMapper.java index 24da7d2e46563..cf55af02b8146 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeRealAvailabilityZoneMapper.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeRealAvailabilityZoneMapper.java @@ -34,7 +34,6 @@ public class NodeRealAvailabilityZoneMapper extends AbstractNodeAvailabilityZone private volatile List awarenessAttributes; - @SuppressWarnings("this-escape") public 
NodeRealAvailabilityZoneMapper(Settings settings, ClusterSettings clusterSettings) { this(settings, clusterSettings, null); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java index 7fc61172a5b7d..5ba577eb90ab7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java @@ -71,7 +71,7 @@ public class MlMemoryTracker implements LocalNodeMasterListener { private static final Duration RECENT_UPDATE_THRESHOLD = Duration.ofMinutes(1); private static final Duration DEFAULT_AUTOSCALING_CHECK_INTERVAL = Duration.ofMinutes(5); - private final Logger logger = LogManager.getLogger(MlMemoryTracker.class); + private static final Logger logger = LogManager.getLogger(MlMemoryTracker.class); private final Map memoryRequirementByAnomalyDetectorJob = new ConcurrentHashMap<>(); private final Map memoryRequirementByDataFrameAnalyticsJob = new ConcurrentHashMap<>(); private final Map> memoryRequirementByTaskName; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java index 05478deac811c..7ea63cf7945f0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java @@ -14,12 +14,16 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ml.MlTasks; +import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import 
org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.autoscaling.MlAutoscalingStats; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; @@ -34,7 +38,9 @@ import java.util.function.Consumer; import static org.elasticsearch.xpack.ml.autoscaling.MlAutoscalingResourceTracker.MlJobRequirements; +import static org.elasticsearch.xpack.ml.job.JobNodeSelector.AWAITING_LAZY_ASSIGNMENT; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class MlAutoscalingResourceTrackerTests extends ESTestCase { @@ -83,6 +89,137 @@ public void testGetMemoryAndProcessors() throws InterruptedException { ); } + public void testGetMemoryAndProcessorsScaleUpGivenAwaitingLazyAssignment() throws InterruptedException { + long memory = 1000000000; + Map nodeAttr = Map.of( + MachineLearning.MACHINE_MEMORY_NODE_ATTR, + Long.toString(memory), + MachineLearning.MAX_JVM_SIZE_NODE_ATTR, + "400000000", + MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, + "7.2.0" + ); + String jobId = "lazy-job"; + MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( + List.of( + new PersistentTasksCustomMetadata.PersistentTask<>( + MlTasks.jobTaskId(jobId), + MlTasks.JOB_TASK_NAME, + new OpenJobAction.JobParams(jobId), + 1, + AWAITING_LAZY_ASSIGNMENT + ) + ), + List.of(), + List.of(), + Map.of(), + List.of( + DiscoveryNodeUtils.builder("ml-1") + .name("ml-1") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + 
.attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build(), + DiscoveryNodeUtils.builder("ml-2") + .name("ml-2") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build() + ), + PersistentTasksCustomMetadata.builder().build() + ); + MlMemoryTracker mockTracker = mock(MlMemoryTracker.class); + when(mockTracker.getAnomalyDetectorJobMemoryRequirement(jobId)).thenReturn(memory / 4); + this.assertAsync( + listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors( + mlAutoscalingContext, + mockTracker, + Map.of("ml-1", memory, "ml-2", memory), + memory / 2, + 10, + MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, + listener + ), + stats -> { + assertEquals(memory, stats.perNodeMemoryInBytes()); + assertEquals(2, stats.nodes()); + assertEquals(1, stats.minNodes()); + assertEquals(0, stats.extraSingleNodeProcessors()); + assertEquals(memory / 4, stats.extraSingleNodeModelMemoryInBytes()); + assertEquals(memory / 4, stats.extraModelMemoryInBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + } + ); + } + + public void testGetMemoryAndProcessorsScaleUpGivenAwaitingLazyAssignmentButFailed() throws InterruptedException { + long memory = 1000000000; + Map nodeAttr = Map.of( + MachineLearning.MACHINE_MEMORY_NODE_ATTR, + Long.toString(memory), + MachineLearning.MAX_JVM_SIZE_NODE_ATTR, + "400000000", + MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, + "7.2.0" + ); + String jobId = "lazy-job"; + MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( + List.of( + new PersistentTasksCustomMetadata.PersistentTask<>( + new PersistentTasksCustomMetadata.PersistentTask<>( + MlTasks.jobTaskId(jobId), + MlTasks.JOB_TASK_NAME, + new OpenJobAction.JobParams(jobId), + 1, + AWAITING_LAZY_ASSIGNMENT + ), + new JobTaskState(JobState.FAILED, 1, "a nasty bug") + ) + ), + List.of(), + List.of(), + 
Map.of(), + List.of( + DiscoveryNodeUtils.builder("ml-1") + .name("ml-1") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build(), + DiscoveryNodeUtils.builder("ml-2") + .name("ml-2") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build() + ), + PersistentTasksCustomMetadata.builder().build() + ); + MlMemoryTracker mockTracker = mock(MlMemoryTracker.class); + when(mockTracker.getAnomalyDetectorJobMemoryRequirement(jobId)).thenReturn(memory / 4); + this.assertAsync( + listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors( + mlAutoscalingContext, + mockTracker, + Map.of("ml-1", memory, "ml-2", memory), + memory / 2, + 10, + MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, + listener + ), + stats -> { + assertEquals(memory, stats.perNodeMemoryInBytes()); + assertEquals(2, stats.nodes()); + assertEquals(0, stats.minNodes()); + assertEquals(0, stats.extraSingleNodeProcessors()); + assertEquals(0, stats.extraSingleNodeModelMemoryInBytes()); + assertEquals(0, stats.extraModelMemoryInBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + } + ); + } + public void testCheckIfJobsCanBeMovedInLeastEfficientWayMemoryOnly() { assertEquals( 0L, @@ -897,7 +1034,6 @@ public void testGetMemoryAndProcessorsScaleDown() throws InterruptedException { ) ).addRoutingEntry("ml-node-3", new RoutingInfo(1, 1, RoutingState.STARTED, "")).build() ), - List.of( DiscoveryNodeUtils.builder("ml-node-1") .name("ml-node-name-1") diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java index 1eda86b030464..e04d925f429fe 100644 --- 
a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java @@ -51,7 +51,6 @@ public class CleanerService extends AbstractLifecycleComponent { clusterSettings.addSettingsUpdateConsumer(MonitoringField.HISTORY_DURATION, this::setGlobalRetention); } - @SuppressWarnings("this-escape") public CleanerService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) { this(settings, clusterSettings, threadPool, new DefaultExecutionScheduler()); } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index ba43cf82d1458..20421ca909503 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -75,7 +75,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -public class LocalExporter extends Exporter implements ClusterStateListener, CleanerService.Listener, LicenseStateListener { +public final class LocalExporter extends Exporter implements ClusterStateListener, CleanerService.Listener, LicenseStateListener { private static final Logger logger = LogManager.getLogger(LocalExporter.class); @@ -108,7 +108,6 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle private long stateInitializedTime; - @SuppressWarnings("this-escape") public LocalExporter( Exporter.Config config, Client client, diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java index cbf1d606e3a54..4dd92b125b88c 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java @@ -26,9 +26,8 @@ /** * Tests {@link HttpResource}. */ -public class HttpResourceTests extends ESTestCase { +public final class HttpResourceTests extends ESTestCase { - @SuppressWarnings("this-escape") private final String owner = getTestName(); private final RestClient mockClient = mock(RestClient.class); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java index 464fd052f5425..23a9ee28b9ac9 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java @@ -19,10 +19,9 @@ /** * Tests {@link NodeFailureListener}. 
*/ -public class NodeFailureListenerTests extends ESTestCase { +public final class NodeFailureListenerTests extends ESTestCase { private final Sniffer sniffer = mock(Sniffer.class); - @SuppressWarnings("this-escape") private final HttpResource resource = new MockHttpResource(getTestName(), false); private final Node node = new Node(new HttpHost("localhost", 9200)); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/WatcherExistsHttpResourceTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/WatcherExistsHttpResourceTests.java index f341a1fadc226..9ef6b5482c735 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/WatcherExistsHttpResourceTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/WatcherExistsHttpResourceTests.java @@ -32,13 +32,12 @@ /** * Tests {@link WatcherExistsHttpResource}. */ -public class WatcherExistsHttpResourceTests extends AbstractPublishableHttpResourceTestCase { +public final class WatcherExistsHttpResourceTests extends AbstractPublishableHttpResourceTestCase { private final ClusterService clusterService = mock(ClusterService.class); private final MultiHttpResource mockWatches = mock(MultiHttpResource.class); private final WatcherExistsHttpResource resource = new WatcherExistsHttpResource(owner, clusterService, mockWatches); - @SuppressWarnings("this-escape") private final Map expectedParameters = getParameters(resource.getDefaultParameters(), GET_EXISTS, XPACK_DOES_NOT_EXIST); public void testDoCheckIgnoresClientWhenNotElectedMaster() { diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java index afc13567e59b7..c9c5a7c7a4861 
100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java @@ -26,7 +26,7 @@ import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; -public class FailShardsOnInvalidLicenseClusterListener implements LicenseStateListener, IndexEventListener { +public final class FailShardsOnInvalidLicenseClusterListener implements LicenseStateListener, IndexEventListener { private static final Logger logger = LogManager.getLogger(FailShardsOnInvalidLicenseClusterListener.class); @@ -38,7 +38,6 @@ public class FailShardsOnInvalidLicenseClusterListener implements LicenseStateLi private boolean allowed; - @SuppressWarnings("this-escape") public FailShardsOnInvalidLicenseClusterListener(XPackLicenseState xPackLicenseState, RerouteService rerouteService) { this.xPackLicenseState = xPackLicenseState; this.rerouteService = rerouteService; diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60MetadataOnlyPointsReader.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60MetadataOnlyPointsReader.java index 8d7f81d28579c..2e796a04200fe 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60MetadataOnlyPointsReader.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60MetadataOnlyPointsReader.java @@ -35,13 +35,12 @@ import java.util.Map; /** Reads the metadata of point values previously written with Lucene60PointsWriter */ -public class Lucene60MetadataOnlyPointsReader extends PointsReader { +public final class Lucene60MetadataOnlyPointsReader extends PointsReader { final IndexInput dataIn; final 
SegmentReadState readState; final Map readers = new HashMap<>(); /** Sole constructor */ - @SuppressWarnings("this-escape") public Lucene60MetadataOnlyPointsReader(SegmentReadState readState) throws IOException { this.readState = readState; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java index b0de50cac8b5b..ad418e1bbc5e8 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; import org.elasticsearch.xpack.core.ilm.OperationMode; +import org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType; import org.elasticsearch.xpack.core.ilm.action.PutLifecycleAction; import org.junit.After; import org.junit.Before; @@ -328,7 +329,16 @@ public void testAllResourcesPresentButOutdated() { Map metadata = new HashMap<>(policy.getMetadata()); // outdated version metadata.put("version", randomIntBetween(1, ProfilingIndexTemplateRegistry.INDEX_TEMPLATE_VERSION - 1)); - policies.put(policy.getName(), new LifecyclePolicy(policy.getName(), policy.getPhases(), metadata)); + policies.put( + policy.getName(), + new LifecyclePolicy( + TimeseriesLifecycleType.INSTANCE, + policy.getName(), + policy.getPhases(), + metadata, + policy.getDeprecated() + ) + ); } ClusterState clusterState = createClusterState(Settings.EMPTY, componentTemplates, composableTemplates, policies, nodes); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/InvalidArgumentException.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/InvalidArgumentException.java new file 
mode 100644 index 0000000000000..3713da26cb261 --- /dev/null +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/InvalidArgumentException.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.ql; + +/** + * Exception thrown when unable to continue processing client request, + * in cases such as invalid query parameter or failure to apply requested processing to given data. + * It's meant as a generic equivalent to QlIllegalArgumentException (that's a server exception). + * TODO: reason for [E|S|ES]QL specializations of QlIllegalArgumentException? + * TODO: the intended use of ql.ParsingException, vs its [E|S|ES]QL equivalents, subclassed from the respective XxxClientException? + * Same for PlanningException. + */ +public class InvalidArgumentException extends QlClientException { + + public InvalidArgumentException(String message, Object... args) { + super(message, args); + } + + public InvalidArgumentException(Throwable cause, String message, Object... 
args) { + super(cause, message, args); + } + +} diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/ParsingException.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/ParsingException.java index ca7b33291717a..e343bb7be1894 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/ParsingException.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/ParsingException.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ql; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.ql.tree.Source; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -50,11 +49,6 @@ public String getErrorMessage() { return super.getMessage(); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - @Override public String getMessage() { return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/QlClientException.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/QlClientException.java index e0aba6507c7fc..0a28096c8df48 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/QlClientException.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/QlClientException.java @@ -6,11 +6,13 @@ */ package org.elasticsearch.xpack.ql; +import org.elasticsearch.rest.RestStatus; + /** * Exception thrown by performing client (or user) code. * Typically it means the given action or query is incorrect and needs fixing. */ -public abstract class QlClientException extends QlException { +public class QlClientException extends QlException { protected QlClientException(String message, Object... args) { super(message, args); @@ -31,4 +33,9 @@ protected QlClientException(Throwable cause, String message, Object... 
args) { protected QlClientException(Throwable cause) { super(cause); } + + @Override + public RestStatus status() { + return RestStatus.BAD_REQUEST; + } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java index 5e0aa654392e7..0defe009c6f97 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -193,7 +194,8 @@ protected Object unwrapFieldsMultiValue(Object values) { } values = unwrappedValues; } else { - throw new QlIllegalArgumentException("Arrays (returned by [{}]) are not supported", fieldName); + // missing `field_multi_value_leniency` setting + throw new InvalidArgumentException("Arrays (returned by [{}]) are not supported", fieldName); } } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeMap.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeMap.java index 01ff4d67b1027..f2e9e8b04d3c4 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeMap.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeMap.java @@ -32,7 +32,7 @@ * Worth noting the {@link #combine(AttributeMap)}, {@link #intersect(AttributeMap)} and {@link 
#subtract(AttributeMap)} methods which * return copies, decoupled from the input maps. In other words the returned maps can be modified without affecting the input or vice-versa. */ -public class AttributeMap implements Map { +public final class AttributeMap implements Map { static class AttributeWrapper { @@ -155,7 +155,7 @@ public String toString() { private static final AttributeMap EMPTY = new AttributeMap<>(emptyMap()); @SuppressWarnings("unchecked") - public static final AttributeMap emptyAttributeMap() { + public static AttributeMap emptyAttributeMap() { return EMPTY; } @@ -169,7 +169,6 @@ public AttributeMap() { delegate = new LinkedHashMap<>(); } - @SuppressWarnings("this-escape") public AttributeMap(Attribute key, E value) { delegate = new LinkedHashMap<>(); add(key, value); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/ExpressionSet.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/ExpressionSet.java index 7461a3f70a338..7b0012b58bf98 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/ExpressionSet.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/ExpressionSet.java @@ -17,7 +17,7 @@ /** * @param expression type */ -public class ExpressionSet implements Set { +public final class ExpressionSet implements Set { @SuppressWarnings("rawtypes") public static final ExpressionSet EMPTY = new ExpressionSet<>(emptyList()); @@ -34,7 +34,6 @@ public ExpressionSet() { super(); } - @SuppressWarnings("this-escape") public ExpressionSet(Collection c) { addAll(c); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/fulltext/StringQueryPredicate.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/fulltext/StringQueryPredicate.java index efb6809ae428c..cd6a68f135c43 100644 --- 
a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/fulltext/StringQueryPredicate.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/fulltext/StringQueryPredicate.java @@ -15,11 +15,10 @@ import static java.util.Collections.emptyList; -public class StringQueryPredicate extends FullTextPredicate { +public final class StringQueryPredicate extends FullTextPredicate { private final Map fields; - @SuppressWarnings("this-escape") public StringQueryPredicate(Source source, String query, String options) { super(source, query, options, emptyList()); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/MappingException.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/MappingException.java index 9fb9ce7336812..d2314384384ba 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/MappingException.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/MappingException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.ql.index; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.ql.QlClientException; public class MappingException extends QlClientException { @@ -15,12 +14,4 @@ public MappingException(String message, Object... 
args) { super(message, args); } - public MappingException(String message, Throwable ex) { - super(message, ex); - } - - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/RemoteClusterResolver.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/RemoteClusterResolver.java index 28719b279614a..db062c289501c 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/RemoteClusterResolver.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/RemoteClusterResolver.java @@ -16,10 +16,9 @@ import java.util.TreeSet; import java.util.concurrent.CopyOnWriteArraySet; -public class RemoteClusterResolver extends RemoteClusterAware { +public final class RemoteClusterResolver extends RemoteClusterAware { private final CopyOnWriteArraySet clusters; - @SuppressWarnings("this-escape") public RemoteClusterResolver(Settings settings, ClusterSettings clusterSettings) { super(settings); clusters = new CopyOnWriteArraySet<>(getEnabledRemoteClusters(settings)); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java index 02a9f0f05e255..bb7fa9cf8c03a 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.core.Booleans; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; @@ -363,28 +363,28 @@ private static DefaultConverter conversionToBoolean(DataType from) { 
public static byte safeToByte(long x) { if (x > Byte.MAX_VALUE || x < Byte.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + x + "] out of [byte] range"); + throw new InvalidArgumentException("[{}] out of [byte] range", x); } return (byte) x; } public static short safeToShort(long x) { if (x > Short.MAX_VALUE || x < Short.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + x + "] out of [short] range"); + throw new InvalidArgumentException("[{}] out of [short] range", x); } return (short) x; } public static int safeToInt(long x) { if (x > Integer.MAX_VALUE || x < Integer.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + x + "] out of [integer] range"); + throw new InvalidArgumentException("[{}] out of [integer] range", x); } return (int) x; } public static long safeDoubleToLong(double x) { if (x > Long.MAX_VALUE || x < Long.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + x + "] out of [long] range"); + throw new InvalidArgumentException("[{}] out of [long] range", x); } return Math.round(x); } @@ -400,20 +400,20 @@ public static Long safeToLong(Number x) { } return x.longValue(); } catch (ArithmeticException ae) { - throw new QlIllegalArgumentException("[" + x + "] out of [long] range", ae); + throw new InvalidArgumentException(ae, "[{}] out of [long] range", x); } } public static BigInteger safeToUnsignedLong(Double x) { if (inUnsignedLongRange(x) == false) { - throw new QlIllegalArgumentException("[" + x + "] out of [unsigned_long] range"); + throw new InvalidArgumentException("[{}] out of [unsigned_long] range", x); } return BigDecimal.valueOf(x).toBigInteger(); } public static BigInteger safeToUnsignedLong(Long x) { if (x < 0) { - throw new QlIllegalArgumentException("[" + x + "] out of [unsigned_long] range"); + throw new InvalidArgumentException("[{}] out of [unsigned_long] range", x); } return BigInteger.valueOf(x); } @@ -421,7 +421,7 @@ public static BigInteger safeToUnsignedLong(Long x) { public static BigInteger 
safeToUnsignedLong(String x) { BigInteger bi = new BigDecimal(x).toBigInteger(); if (isUnsignedLong(bi) == false) { - throw new QlIllegalArgumentException("[" + x + "] out of [unsigned_long] range"); + throw new InvalidArgumentException("[{}] out of [unsigned_long] range", x); } return bi; } @@ -451,7 +451,7 @@ public static Number toInteger(double x, DataType dataType) { public static boolean convertToBoolean(String val) { String lowVal = val.toLowerCase(Locale.ROOT); if (Booleans.isBoolean(lowVal) == false) { - throw new QlIllegalArgumentException("cannot cast [" + val + "] to [boolean]"); + throw new InvalidArgumentException("cannot cast [{}] to [boolean]", val); } return Booleans.parseBoolean(lowVal); } @@ -459,7 +459,7 @@ public static boolean convertToBoolean(String val) { /** * Converts arbitrary object to the desired data type. *

- * Throws QlIllegalArgumentException if such conversion is not possible + * Throws InvalidArgumentException if such conversion is not possible */ public static Object convert(Object value, DataType dataType) { DataType detectedType = DataTypes.fromJava(value); @@ -469,7 +469,7 @@ public static Object convert(Object value, DataType dataType) { Converter converter = converterFor(detectedType, dataType); if (converter == null) { - throw new QlIllegalArgumentException( + throw new InvalidArgumentException( "cannot convert from [{}], type [{}] to [{}]", value, detectedType.typeName(), @@ -546,7 +546,7 @@ public enum DefaultConverter implements Converter { STRING_TO_IP(o -> { if (InetAddresses.isInetAddress(o.toString()) == false) { - throw new QlIllegalArgumentException("[" + o + "] is not a valid IPv4 or IPv6 address"); + throw new InvalidArgumentException("[{}] is not a valid IPv4 or IPv6 address", o); } return o; }), @@ -573,9 +573,9 @@ public static Function fromString(Function conve try { return converter.apply(value.toString()); } catch (NumberFormatException e) { - throw new QlIllegalArgumentException(e, "cannot cast [{}] to [{}]", value, to); + throw new InvalidArgumentException(e, "cannot cast [{}] to [{}]", value, to); } catch (DateTimeParseException | IllegalArgumentException e) { - throw new QlIllegalArgumentException(e, "cannot cast [{}] to [{}]: {}", value, to, e.getMessage()); + throw new InvalidArgumentException(e, "cannot cast [{}] to [{}]: {}", value, to, e.getMessage()); } }; } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java index beebf0d581444..83c731ce4e7a9 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java @@ -17,7 +17,7 @@ import org.elasticsearch.xcontent.ToXContent; import 
org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import java.io.IOException; import java.math.BigInteger; @@ -42,6 +42,8 @@ private StringUtils() {} private static final String[] INTEGER_ORDINALS = new String[] { "th", "st", "nd", "rd", "th", "th", "th", "th", "th", "th" }; + private static final String INVALID_REGEX_SEQUENCE = "Invalid sequence - escape character is not followed by special wildcard char"; + // CamelCase to camel_case public static String camelCaseToUnderscore(String string) { if (Strings.hasText(string) == false) { @@ -109,7 +111,7 @@ public static String likeToJavaPattern(String pattern, char escape) { if (escaped == false && (curr == escape) && escape != 0) { escaped = true; if (i + 1 == pattern.length()) { - throw new QlIllegalArgumentException("Invalid sequence - escape character is not followed by special wildcard char"); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } } else { switch (curr) { @@ -117,9 +119,7 @@ public static String likeToJavaPattern(String pattern, char escape) { case '_' -> regex.append(escaped ? 
"_" : "."); default -> { if (escaped) { - throw new QlIllegalArgumentException( - "Invalid sequence - escape character is not followed by special wildcard char" - ); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } // escape special regex characters switch (curr) { @@ -151,7 +151,7 @@ public static String wildcardToJavaPattern(String pattern, char escape) { if (escaped == false && (curr == escape) && escape != 0) { escaped = true; if (i + 1 == pattern.length()) { - throw new QlIllegalArgumentException("Invalid sequence - escape character is not followed by special wildcard char"); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } } else { switch (curr) { @@ -159,9 +159,7 @@ public static String wildcardToJavaPattern(String pattern, char escape) { case '?' -> regex.append(escaped ? "\\?" : "."); default -> { if (escaped && escape != curr) { - throw new QlIllegalArgumentException( - "Invalid sequence - escape character is not followed by special wildcard char" - ); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } // escape special regex characters switch (curr) { @@ -197,7 +195,7 @@ public static String likeToLuceneWildcard(String pattern, char escape) { if (escaped == false && (curr == escape) && escape != 0) { if (i + 1 == pattern.length()) { - throw new QlIllegalArgumentException("Invalid sequence - escape character is not followed by special wildcard char"); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } escaped = true; } else { @@ -206,9 +204,7 @@ public static String likeToLuceneWildcard(String pattern, char escape) { case '_' -> wildcard.append(escaped ? 
"_" : "?"); default -> { if (escaped) { - throw new QlIllegalArgumentException( - "Invalid sequence - escape character is not followed by special wildcard char" - ); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } // escape special regex characters switch (curr) { @@ -238,7 +234,7 @@ public static String likeToIndexWildcard(String pattern, char escape) { if (escaped == false && (curr == escape) && escape != 0) { if (i + 1 == pattern.length()) { - throw new QlIllegalArgumentException("Invalid sequence - escape character is not followed by special wildcard char"); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } escaped = true; } else { @@ -247,9 +243,7 @@ public static String likeToIndexWildcard(String pattern, char escape) { case '_' -> wildcard.append(escaped ? "_" : "*"); default -> { if (escaped) { - throw new QlIllegalArgumentException( - "Invalid sequence - escape character is not followed by special wildcard char" - ); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } // the resolver doesn't support escaping... 
wildcard.append(curr); @@ -311,24 +305,24 @@ public static List findSimilar(String match, Iterable potentialM return scoredMatches.stream().map(a -> a.v2()).collect(toList()); } - public static double parseDouble(String string) throws QlIllegalArgumentException { + public static double parseDouble(String string) throws InvalidArgumentException { double value; try { value = Double.parseDouble(string); } catch (NumberFormatException nfe) { - throw new QlIllegalArgumentException("Cannot parse number [{}]", string); + throw new InvalidArgumentException(nfe, "Cannot parse number [{}]", string); } if (Double.isInfinite(value)) { - throw new QlIllegalArgumentException("Number [{}] is too large", string); + throw new InvalidArgumentException("Number [{}] is too large", string); } if (Double.isNaN(value)) { - throw new QlIllegalArgumentException("[{}] cannot be parsed as a number (NaN)", string); + throw new InvalidArgumentException("[{}] cannot be parsed as a number (NaN)", string); } return value; } - public static long parseLong(String string) throws QlIllegalArgumentException { + public static long parseLong(String string) throws InvalidArgumentException { try { return Long.parseLong(string); } catch (NumberFormatException nfe) { @@ -337,25 +331,25 @@ public static long parseLong(String string) throws QlIllegalArgumentException { try { bi.longValueExact(); } catch (ArithmeticException ae) { - throw new QlIllegalArgumentException("Number [{}] is too large", string); + throw new InvalidArgumentException("Number [{}] is too large", string); } } catch (NumberFormatException ex) { // parsing fails, go through } - throw new QlIllegalArgumentException("Cannot parse number [{}]", string); + throw new InvalidArgumentException("Cannot parse number [{}]", string); } } - public static Number parseIntegral(String string) throws QlIllegalArgumentException { + public static Number parseIntegral(String string) throws InvalidArgumentException { BigInteger bi; try { bi = new 
BigInteger(string); } catch (NumberFormatException ex) { - throw new QlIllegalArgumentException("Cannot parse number [{}]", string); + throw new InvalidArgumentException(ex, "Cannot parse number [{}]", string); } if (bi.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) { if (isUnsignedLong(bi) == false) { - throw new QlIllegalArgumentException("Number [{}] is too large", string); + throw new InvalidArgumentException("Number [{}] is too large", string); } return bi; } diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/expression/LiteralTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/expression/LiteralTests.java index c71f34307a6bf..13436e983af0a 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/expression/LiteralTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/expression/LiteralTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.ql.expression; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.ql.tree.SourceTests; import org.elasticsearch.xpack.ql.type.Converter; @@ -133,7 +133,7 @@ private List validReplacementDataTypes(Object value, DataType type) { Converter c = DataTypeConverter.converterFor(type, candidate); c.convert(value); validDataTypes.add(candidate); - } catch (QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { // invalid conversion then.... 
} } diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/type/DataTypeConversionTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/type/DataTypeConversionTests.java index 3d439acc04776..c0cb0f6667b59 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/type/DataTypeConversionTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.ql.type; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Location; import org.elasticsearch.xpack.ql.tree.Source; @@ -35,7 +35,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.UNSUPPORTED; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; import static org.elasticsearch.xpack.ql.type.DateUtils.asDateTime; -import static org.elasticsearch.xpack.ql.util.NumericUtils.UNSIGNED_LONG_MAX; public class DataTypeConversionTests extends ESTestCase { @@ -72,7 +71,7 @@ public void testConversionToLong() { assertEquals(10L, conversion.convert(10.0)); assertEquals(10L, conversion.convert(10.1)); assertEquals(11L, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -82,7 +81,7 @@ public void testConversionToLong() { assertEquals(bi.longValue(), conversion.convert(bi)); BigInteger longPlus = bi.add(BigInteger.valueOf(Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(longPlus)); + Exception e = expectThrows(InvalidArgumentException.class, () 
-> conversion.convert(longPlus)); assertEquals("[" + longPlus + "] out of [long] range", e.getMessage()); } { @@ -110,7 +109,7 @@ public void testConversionToLong() { assertNull(conversion.convert(null)); assertEquals(1L, conversion.convert("1")); assertEquals(0L, conversion.convert("-0")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [long]", e.getMessage()); } } @@ -123,7 +122,7 @@ public void testConversionToDateTime() { assertEquals(asDateTime(10L), conversion.convert(10.0)); assertEquals(asDateTime(10L), conversion.convert(10.1)); assertEquals(asDateTime(11L), conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -133,7 +132,7 @@ public void testConversionToDateTime() { assertEquals(asDateTime(bi.longValue()), conversion.convert(bi)); BigInteger longPlus = bi.add(BigInteger.valueOf(Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(longPlus)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(longPlus)); assertEquals("[" + longPlus + "] out of [long] range", e.getMessage()); } { @@ -175,7 +174,7 @@ public void testConversionToDateTime() { Converter forward = converterFor(DATETIME, KEYWORD); Converter back = converterFor(KEYWORD, DATETIME); assertEquals(dt, back.convert(forward.convert(dt))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to 
[datetime]: Text '0xff' could not be parsed at index 0", e.getMessage()); } } @@ -222,7 +221,7 @@ public void testConversionToFloat() { assertEquals(1.0f, (float) conversion.convert("1"), 0); assertEquals(0.0f, (float) conversion.convert("-0"), 0); assertEquals(12.776f, (float) conversion.convert("12.776"), 0.00001); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [float]", e.getMessage()); } } @@ -269,7 +268,7 @@ public void testConversionToDouble() { assertEquals(1.0, (double) conversion.convert("1"), 0); assertEquals(0.0, (double) conversion.convert("-0"), 0); assertEquals(12.776, (double) conversion.convert("12.776"), 0.00001); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [double]", e.getMessage()); } } @@ -326,17 +325,17 @@ public void testConversionToBoolean() { assertEquals(true, conversion.convert("True")); assertEquals(false, conversion.convert("fAlSe")); // Everything else should fail - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("10")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10")); assertEquals("cannot cast [10] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("-1")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("-1")); assertEquals("cannot cast [-1] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0")); assertEquals("cannot cast [0] to [boolean]", e.getMessage()); - e = 
expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("blah")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("blah")); assertEquals("cannot cast [blah] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("Yes")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("Yes")); assertEquals("cannot cast [Yes] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("nO")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("nO")); assertEquals("cannot cast [nO] to [boolean]", e.getMessage()); } } @@ -350,11 +349,11 @@ public void testConversionToUnsignedLong() { assertEquals(BigDecimal.valueOf(d).toBigInteger(), conversion.convert(d)); Double ulmAsDouble = UNSIGNED_LONG_MAX.doubleValue(); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(ulmAsDouble)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(ulmAsDouble)); assertEquals("[" + ulmAsDouble + "] out of [unsigned_long] range", e.getMessage()); Double nd = -Math.abs(randomDouble()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(nd)); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(nd)); assertEquals("[" + nd + "] out of [unsigned_long] range", e.getMessage()); } { @@ -364,7 +363,7 @@ public void testConversionToUnsignedLong() { BigInteger bi = BigInteger.valueOf(randomNonNegativeLong()); assertEquals(bi, conversion.convert(bi.longValue())); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(bi.negate())); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bi.negate())); assertEquals("[" + bi.negate() + "] out of [unsigned_long] range", e.getMessage()); } { @@ -374,7 +373,7 @@ public void 
testConversionToUnsignedLong() { long l = randomNonNegativeLong(); assertEquals(BigInteger.valueOf(l), conversion.convert(asDateTime(l))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateTime(-l))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(-l))); assertEquals("[" + -l + "] out of [unsigned_long] range", e.getMessage()); } { @@ -395,9 +394,9 @@ public void testConversionToUnsignedLong() { assertEquals(bi, conversion.convert(bi.toString() + "." + randomNonNegativeLong())); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(BigInteger.ONE.negate().toString())); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(BigInteger.ONE.negate().toString())); assertEquals("[-1] out of [unsigned_long] range", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(UNSIGNED_LONG_MAX.add(BigInteger.ONE).toString())); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(UNSIGNED_LONG_MAX.add(BigInteger.ONE).toString())); assertEquals("[" + UNSIGNED_LONG_MAX.add(BigInteger.ONE).toString() + "] out of [unsigned_long] range", e.getMessage()); } } @@ -410,7 +409,7 @@ public void testConversionToInt() { assertEquals(10, conversion.convert(10.0)); assertEquals(10, conversion.convert(10.1)); assertEquals(11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Long.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Long.MAX_VALUE)); assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage()); } { @@ -420,7 +419,7 @@ public void testConversionToInt() { assertEquals(bi.intValueExact(), conversion.convert(bi)); BigInteger bip = BigInteger.valueOf(randomLongBetween(Integer.MAX_VALUE, Long.MAX_VALUE)); - Exception e = 
expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(bip)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bip)); assertEquals("[" + bip + "] out of [integer] range", e.getMessage()); } { @@ -431,7 +430,7 @@ public void testConversionToInt() { assertEquals(-123456789, conversion.convert(asDateTime(-123456789L))); // Nanos are ignored, only millis are used assertEquals(62123, conversion.convert(DateUtils.asDateTime("1970-01-01T00:01:02.123456789Z"))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateTime(Long.MAX_VALUE))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(Long.MAX_VALUE))); assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage()); } } @@ -444,7 +443,7 @@ public void testConversionToShort() { assertEquals((short) 10, conversion.convert(10.0)); assertEquals((short) 10, conversion.convert(10.1)); assertEquals((short) 11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Integer.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Integer.MAX_VALUE)); assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage()); } { @@ -454,7 +453,7 @@ public void testConversionToShort() { assertEquals(bi.shortValueExact(), conversion.convert(bi)); BigInteger bip = BigInteger.valueOf(randomLongBetween(Short.MAX_VALUE, Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(bip)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bip)); assertEquals("[" + bip + "] out of [short] range", e.getMessage()); } { @@ -464,7 +463,7 @@ public void testConversionToShort() { assertEquals((short) -12345, conversion.convert(asDateTime(-12345L))); // Nanos are ignored, only millis are used 
assertEquals((short) 1123, conversion.convert(DateUtils.asDateTime("1970-01-01T00:00:01.123456789Z"))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage()); } } @@ -477,7 +476,7 @@ public void testConversionToByte() { assertEquals((byte) 10, conversion.convert(10.0)); assertEquals((byte) 10, conversion.convert(10.1)); assertEquals((byte) 11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Short.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Short.MAX_VALUE)); assertEquals("[" + Short.MAX_VALUE + "] out of [byte] range", e.getMessage()); } { @@ -487,7 +486,7 @@ public void testConversionToByte() { assertEquals(bi.byteValueExact(), conversion.convert(bi)); BigInteger bip = BigInteger.valueOf(randomLongBetween(Byte.MAX_VALUE, Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(bip)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bip)); assertEquals("[" + bip + "] out of [byte] range", e.getMessage()); } { @@ -497,7 +496,7 @@ public void testConversionToByte() { assertEquals((byte) -123, conversion.convert(asDateTime(-123L))); // Nanos are ignored, only millis are used assertEquals((byte) 123, conversion.convert(DateUtils.asDateTime("1970-01-01T00:00:00.123456789Z"))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); assertEquals("[" + Integer.MAX_VALUE + "] out of [byte] range", e.getMessage()); } } @@ 
-546,7 +545,7 @@ public void testEsDataTypes() { } public void testConversionToUnsupported() { - Exception e = expectThrows(QlIllegalArgumentException.class, () -> DataTypeConverter.convert(Integer.valueOf(1), UNSUPPORTED)); + Exception e = expectThrows(InvalidArgumentException.class, () -> DataTypeConverter.convert(Integer.valueOf(1), UNSUPPORTED)); assertEquals("cannot convert from [1], type [integer] to [unsupported]", e.getMessage()); } @@ -554,7 +553,7 @@ public void testStringToIp() { Converter conversion = converterFor(KEYWORD, IP); assertNull(conversion.convert(null)); assertEquals("192.168.1.1", conversion.convert("192.168.1.1")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("10.1.1.300")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10.1.1.300")); assertEquals("[10.1.1.300] is not a valid IPv4 or IPv6 address", e.getMessage()); } diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 0b7de9a0996ef..509d4d5012f52 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -186,8 +186,11 @@ tasks.named('forbiddenApisMain').configure { tasks.named('forbiddenApisTest').configure { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' + bundledSignatures + } } // classes are missing, e.g. 
com.ibm.icu.lang.UCharacter diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 72c3abec8d3d4..3e98dfe60ea20 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -1,4 +1,4 @@ -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.build' @@ -52,7 +52,10 @@ if (BuildParams.inFipsJvm) { } // Forbiden APIs non-portable checks fail because bouncy castle classes being used from the FIPS JDK since those are // not part of the Java specification - all of this is as designed, so we have to relax this check for FIPS. - tasks.withType(CheckForbiddenApis).configureEach { - bundledSignatures -= "jdk-non-portable" + tasks.withType(CheckForbiddenApisTask).configureEach { + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= "jdk-non-portable" + bundledSignatures + } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java index 252cbea619ee8..44f7a6d47e361 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.xpack.ssl; import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.util.Strings; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; @@ -35,7 +33,6 @@ * by verifying if the cluster goes back to normal. 
*/ public class SSLReloadDuringStartupIntegTests extends SecurityIntegTestCase { - private final Logger LOGGER = LogManager.getLogger(SSLReloadDuringStartupIntegTests.class); private final String goodKeyStoreFilePath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"; private final String badKeyStoreFilePath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks"; @@ -100,18 +97,18 @@ public void testReloadDuringStartup() throws Exception { final Path badKeystorePath = getDataPath(badKeyStoreFilePath); // stop a node, and apply this bad keystore final Path goodKeystorePath = getDataPath(goodKeyStoreFilePath); // start the node, and apply this good keystore assertTrue(Files.exists(nodeKeystorePath)); - LOGGER.trace("Stopping node [{}] in {}-node cluster {}...", nodeName, nodeNames.length, nodeNames); + logger.trace("Stopping node [{}] in {}-node cluster {}...", nodeName, nodeNames.length, nodeNames); final long stopNanos = System.nanoTime(); internalCluster().restartNode(nodeName, new RestartCallback() { @Override public Settings onNodeStopped(String nodeName) throws Exception { - LOGGER.debug("Node [{}] stopped in {}ms.", nodeName, TimeValue.timeValueNanos(System.nanoTime() - stopNanos).millisFrac()); + logger.debug("Node [{}] stopped in {}ms.", nodeName, TimeValue.timeValueNanos(System.nanoTime() - stopNanos).millisFrac()); atomicCopyIfPossible(badKeystorePath, nodeKeystorePath); final Thread fixKeystoreThread = new Thread(() -> { waitUntilNodeStartupIsReadyToBegin(beforeKeystoreFix); // SYNC: Cert update & ES restart try { atomicCopyIfPossible(goodKeystorePath, nodeKeystorePath); - LOGGER.trace("Waiting for ES restart..."); + logger.trace("Waiting for ES restart..."); afterKeystoreFix.countDown(); // SYNC: Cert update & ES restart } catch (IOException e) { throw new UncheckedIOException(e); @@ -123,30 +120,30 @@ public Settings onNodeStopped(String nodeName) throws Exception { return 
super.onNodeStopped(nodeName); // ASSUME: RestartCallback will do ES start next } }); - LOGGER.trace("Waiting for keystore fix..."); - timed(LOGGER, Level.DEBUG, "Awaited {}ms. Verifying the cluster...", () -> { + logger.trace("Waiting for keystore fix..."); + timed(Level.DEBUG, "Awaited {}ms. Verifying the cluster...", () -> { try { afterKeystoreFix.await(); // SYNC: Verify cluster after cert update } catch (InterruptedException e) { throw new RuntimeException(e); } }); - timed(LOGGER, Level.TRACE, "Ensure cluster size consistency took {}ms.", this::ensureClusterSizeConsistency); - timed(LOGGER, Level.TRACE, "Ensure fully connected cluster took {}ms.", this::ensureFullyConnectedCluster); + timed(Level.TRACE, "Ensure cluster size consistency took {}ms.", this::ensureClusterSizeConsistency); + timed(Level.TRACE, "Ensure fully connected cluster took {}ms.", this::ensureFullyConnectedCluster); } private void waitUntilNodeStartupIsReadyToBegin(final CountDownLatch beforeKeystoreFix) { - LOGGER.trace("Waiting for ES start to begin..."); + logger.trace("Waiting for ES start to begin..."); beforeKeystoreFix.countDown(); // SYNC: Cert update & ES restart final long sleepMillis = randomLongBetween(1L, 2000L); // intended sleepMillis - timed(LOGGER, Level.DEBUG, "Awaited {}ms. Sleeping " + sleepMillis + "ms before fixing...", () -> { + timed(Level.DEBUG, "Awaited {}ms. Sleeping " + sleepMillis + "ms before fixing...", () -> { try { beforeKeystoreFix.await(); // SYNC: Cert update & ES restart } catch (InterruptedException e) { Thread.currentThread().interrupt(); } }); - timed(LOGGER, Level.DEBUG, "Slept {}ms, intended " + sleepMillis + "ms. Fixing can start now...", () -> { + timed(Level.DEBUG, "Slept {}ms, intended " + sleepMillis + "ms. 
Fixing can start now...", () -> { try { Thread.sleep(sleepMillis); // Simulate cert update delay relative to ES start } catch (InterruptedException e) { @@ -156,9 +153,9 @@ private void waitUntilNodeStartupIsReadyToBegin(final CountDownLatch beforeKeyst } private void waitUntilFixKeystoreIsReadyToBegin(final CountDownLatch beforeKeystoreFix) { - LOGGER.trace("Waiting for keystore fix to begin..."); + logger.trace("Waiting for keystore fix to begin..."); beforeKeystoreFix.countDown(); // SYNC: Cert update & ES restart - timed(LOGGER, Level.DEBUG, "Awaited {}ms. Node can start now...", () -> { + timed(Level.DEBUG, "Awaited {}ms. Node can start now...", () -> { try { beforeKeystoreFix.await(); // SYNC: Cert update & ES restart } catch (InterruptedException e) { @@ -167,8 +164,7 @@ private void waitUntilFixKeystoreIsReadyToBegin(final CountDownLatch beforeKeyst }); } - static void timed(final Logger LOGGER, final Level level, final String message, final Runnable runnable) { - assert LOGGER != null; + private void timed(final Level level, final String message, final Runnable runnable) { assert level != null; assert Strings.isEmpty(message) == false; assert message.contains("{}ms") : "Message must contain {}ms"; @@ -178,7 +174,7 @@ static void timed(final Logger LOGGER, final Level level, final String message, try { runnable.run(); } finally { - LOGGER.log(level, message, TimeValue.timeValueNanos(System.nanoTime() - startNanos).millisFrac()); + logger.log(level, message, TimeValue.timeValueNanos(System.nanoTime() - startNanos).millisFrac()); } } @@ -191,16 +187,16 @@ static void timed(final Logger LOGGER, final Level level, final String message, * @throws IOException Cannot create temp file, or copy source file to temp file, or non-atomic move temp file to target file. 
*/ private void atomicCopyIfPossible(Path source, Path target) throws IOException { - LOGGER.trace("Copying [{}] to [{}]", source, target); + logger.trace("Copying [{}] to [{}]", source, target); Path tmp = createTempFile(); - LOGGER.trace("Created temporary file [{}]", tmp); + logger.trace("Created temporary file [{}]", tmp); Files.copy(source, tmp, StandardCopyOption.REPLACE_EXISTING); try { Files.move(tmp, target, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE); - LOGGER.debug("Atomic move succeeded from [{}] to [{}]", tmp, target); + logger.debug("Atomic move succeeded from [{}] to [{}]", tmp, target); } catch (AtomicMoveNotSupportedException e) { Files.move(tmp, target, StandardCopyOption.REPLACE_EXISTING); - LOGGER.debug("Non-atomic move succeeded from [{}] to [{}]", tmp, target); + logger.debug("Non-atomic move succeeded from [{}] to [{}]", tmp, target); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupport.java index c18428837965c..7388b668563b3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupport.java @@ -41,7 +41,7 @@ public class DelegatedAuthorizationSupport { private final RealmUserLookup lookup; - private final Logger logger; + private static final Logger logger = LogManager.getLogger(DelegatedAuthorizationSupport.class); private final XPackLicenseState licenseState; /** @@ -74,7 +74,6 @@ protected DelegatedAuthorizationSupport( final List resolvedLookupRealms = resolveRealms(allRealms, lookupRealms); checkForRealmChains(resolvedLookupRealms, settings); this.lookup = new RealmUserLookup(resolvedLookupRealms, threadContext); - this.logger = 
LogManager.getLogger(getClass()); this.licenseState = licenseState; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticator.java index 008aa9df6bdd4..ca09d6fafbe44 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticator.java @@ -37,7 +37,7 @@ public class SecondaryAuthenticator { */ public static final String SECONDARY_AUTH_HEADER_NAME = "es-secondary-authorization"; - private final Logger logger = LogManager.getLogger(SecondaryAuthenticator.class); + private static final Logger logger = LogManager.getLogger(SecondaryAuthenticator.class); private final SecurityContext securityContext; private final AuthenticationService authenticationService; private final AuditTrailService auditTrailService; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/SetSecurityUserProcessor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/SetSecurityUserProcessor.java index ca8ab469ce74d..62a1a1466ec84 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/SetSecurityUserProcessor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/SetSecurityUserProcessor.java @@ -40,7 +40,7 @@ public final class SetSecurityUserProcessor extends AbstractProcessor { public static final String TYPE = "set_security_user"; - private final Logger logger = LogManager.getLogger(SetSecurityUserProcessor.class); + private static final Logger logger = LogManager.getLogger(SetSecurityUserProcessor.class); private final SecurityContext securityContext; private final Settings settings; diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/NativeRoleBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/NativeRoleBaseRestHandler.java index 7a7fa306ede25..773d0a8a5ecfd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/NativeRoleBaseRestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/NativeRoleBaseRestHandler.java @@ -19,7 +19,7 @@ abstract class NativeRoleBaseRestHandler extends SecurityBaseRestHandler { - private final Logger logger = LogManager.getLogger(NativeRoleBaseRestHandler.class); + private static final Logger logger = LogManager.getLogger(NativeRoleBaseRestHandler.class); NativeRoleBaseRestHandler(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/NativeUserBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/NativeUserBaseRestHandler.java index 2c1056d33d02c..b367a528628e6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/NativeUserBaseRestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/NativeUserBaseRestHandler.java @@ -19,7 +19,7 @@ abstract class NativeUserBaseRestHandler extends SecurityBaseRestHandler { - private final Logger logger = LogManager.getLogger(NativeUserBaseRestHandler.class); + private static final Logger logger = LogManager.getLogger(NativeUserBaseRestHandler.class); NativeUserBaseRestHandler(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index 3213f990d56bc..22aa7e296d6ed 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -52,7 +52,7 @@ public class SecuritySystemIndices { public static final String SECURITY_PROFILE_ALIAS = ".security-profile"; public static final Version VERSION_SECURITY_PROFILE_ORIGIN = Version.V_8_3_0; - private final Logger logger = LogManager.getLogger(SecuritySystemIndices.class); + private static final Logger logger = LogManager.getLogger(SecuritySystemIndices.class); private final SystemIndexDescriptor mainDescriptor; private final SystemIndexDescriptor tokenDescriptor; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java index 9775e461c4165..413358f784dea 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.license.License; import org.elasticsearch.license.LicensesMetadata; import org.elasticsearch.license.TestUtils; +import org.elasticsearch.license.internal.TrialLicenseVersion; import org.elasticsearch.test.AbstractBootstrapCheckTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.XPackSettings; @@ -38,7 +39,10 @@ public void testFailureUpgradeFrom7xWithImplicitSecuritySettings() throws Except NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(10), previousVersion, IndexVersion.current()); 
nodeMetadata = nodeMetadata.upgradeToCurrentVersion(); ClusterStateLicenseService licenseService = mock(ClusterStateLicenseService.class); - Metadata metadata = createLicensesMetadata(previousVersion, randomFrom("basic", "trial")); + Metadata metadata = createLicensesMetadata( + TrialLicenseVersion.fromXContent(previousVersion.toString()), + randomFrom("basic", "trial") + ); License license = mock(License.class); when(licenseService.getLicense(metadata)).thenReturn(license); when(license.operationMode()).thenReturn(randomFrom(License.OperationMode.BASIC, License.OperationMode.TRIAL)); @@ -70,7 +74,10 @@ public void testUpgradeFrom7xWithImplicitSecuritySettingsOnGoldPlus() throws Exc NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(10), previousVersion, IndexVersion.current()); nodeMetadata = nodeMetadata.upgradeToCurrentVersion(); ClusterStateLicenseService licenseService = mock(ClusterStateLicenseService.class); - Metadata metadata = createLicensesMetadata(previousVersion, randomFrom("gold", "platinum")); + Metadata metadata = createLicensesMetadata( + TrialLicenseVersion.fromXContent(previousVersion.toString()), + randomFrom("gold", "platinum") + ); License license = mock(License.class); when(licenseService.getLicense(metadata)).thenReturn(license); when(license.operationMode()).thenReturn(randomFrom(License.OperationMode.GOLD, License.OperationMode.PLATINUM)); @@ -91,7 +98,7 @@ public void testUpgradeFrom7xWithExplicitSecuritySettings() throws Exception { BootstrapCheck.BootstrapCheckResult result = new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, licenseService).check( createTestContext( Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build(), - createLicensesMetadata(previousVersion, randomFrom("basic", "trial")) + createLicensesMetadata(TrialLicenseVersion.fromXContent(previousVersion.toString()), randomFrom("basic", "trial")) ) ); assertThat(result.isSuccess(), is(true)); @@ -103,7 +110,10 @@ public void 
testUpgradeFrom8xWithImplicitSecuritySettings() throws Exception { nodeMetadata = nodeMetadata.upgradeToCurrentVersion(); ClusterStateLicenseService licenseService = mock(ClusterStateLicenseService.class); BootstrapCheck.BootstrapCheckResult result = new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, licenseService).check( - createTestContext(Settings.EMPTY, createLicensesMetadata(previousVersion, randomFrom("basic", "trial"))) + createTestContext( + Settings.EMPTY, + createLicensesMetadata(TrialLicenseVersion.fromXContent(previousVersion.toString()), randomFrom("basic", "trial")) + ) ); assertThat(result.isSuccess(), is(true)); } @@ -116,14 +126,14 @@ public void testUpgradeFrom8xWithExplicitSecuritySettings() throws Exception { BootstrapCheck.BootstrapCheckResult result = new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, licenseService).check( createTestContext( Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build(), - createLicensesMetadata(previousVersion, randomFrom("basic", "trial")) + createLicensesMetadata(TrialLicenseVersion.fromXContent(previousVersion.toString()), randomFrom("basic", "trial")) ) ); assertThat(result.isSuccess(), is(true)); } - private Metadata createLicensesMetadata(Version version, String licenseMode) throws Exception { + private Metadata createLicensesMetadata(TrialLicenseVersion era, String licenseMode) throws Exception { License license = TestUtils.generateSignedLicense(licenseMode, TimeValue.timeValueHours(2)); - return Metadata.builder().putCustom(LicensesMetadata.TYPE, new LicensesMetadata(license, version)).build(); + return Metadata.builder().putCustom(LicensesMetadata.TYPE, new LicensesMetadata(license, era)).build(); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 66790c9898230..74c5a17844892 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -780,7 +780,7 @@ public void testSecurityRestHandlerInterceptorCanBeInstalled() throws IllegalAcc List.of(), RestExtension.allowAll() ); - actionModule.initRestHandlers(null); + actionModule.initRestHandlers(null, null); appender.assertAllExpectationsMatched(); } finally { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java index fcae0fa6c09f6..5621bdced15b3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java @@ -31,9 +31,8 @@ * in an integration test class, because the reserved handlers are injected through * SPI. 
(see {@link LocalReservedUnstableSecurityStateHandlerProvider}) */ -public class UnstableLocalStateSecurity extends LocalStateSecurity { +public final class UnstableLocalStateSecurity extends LocalStateSecurity { - @SuppressWarnings("this-escape") public UnstableLocalStateSecurity(Settings settings, Path configPath) throws Exception { super(settings, configPath); // We reuse most of the initialization of LocalStateSecurity, we then just overwrite diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java index 5c53179b5aa9f..19202bb109215 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java @@ -21,11 +21,10 @@ import java.util.HashMap; import java.util.Map; -public class DummyUsernamePasswordRealm extends UsernamePasswordRealm { +public final class DummyUsernamePasswordRealm extends UsernamePasswordRealm { private Map> users; - @SuppressWarnings("this-escape") public DummyUsernamePasswordRealm(RealmConfig config) { super(config); initRealmRef( diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java index 15e16d2a86910..d6bef01672faf 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java @@ -160,7 +160,7 @@ public List getNamedXContent() { } } - public static class TaskExecutor extends PersistentTasksExecutor implements 
ClusterStateListener { + public static final class TaskExecutor extends PersistentTasksExecutor implements ClusterStateListener { private final PersistentTasksService persistentTasksService; diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java index f9d8f69d888c0..554b617774db3 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java @@ -33,14 +33,13 @@ * * Currently, this consists of keeping track of whether we've seen nodes which are marked for shutdown. */ -public class NodeSeenService implements ClusterStateListener { +public final class NodeSeenService implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(NodeSeenService.class); final ClusterService clusterService; private final MasterServiceTaskQueue setSeenTaskQueue; - @SuppressWarnings("this-escape") public NodeSeenService(ClusterService clusterService) { this.clusterService = clusterService; this.setSeenTaskQueue = clusterService.createTaskQueue( diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java index bb1e08b9561a5..a4f73e0e3bdac 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java @@ -45,7 +45,7 @@ * * SLM must be running to fix warning reported by this indicator. 
*/ -public class SlmHealthIndicatorService implements HealthIndicatorService { +public final class SlmHealthIndicatorService implements HealthIndicatorService { public static final String NAME = "slm"; @@ -83,7 +83,6 @@ static Diagnosis.Definition checkRecentlyFailedSnapshots(String causeText, Strin private final ClusterService clusterService; private volatile long failedSnapshotWarnThreshold; - @SuppressWarnings("this-escape") public SlmHealthIndicatorService(ClusterService clusterService) { this.clusterService = clusterService; this.failedSnapshotWarnThreshold = clusterService.getClusterSettings().get(SLM_HEALTH_FAILED_SNAPSHOT_WARN_THRESHOLD_SETTING); diff --git a/x-pack/plugin/snapshot-based-recoveries/src/main/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerService.java b/x-pack/plugin/snapshot-based-recoveries/src/main/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerService.java index 5e93003b04aa9..f58c6599050de 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/main/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerService.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/main/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerService.java @@ -37,7 +37,7 @@ import static org.elasticsearch.indices.recovery.RecoverySettings.SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION; public class SnapshotsRecoveryPlannerService implements RecoveryPlannerService { - private final Logger logger = LogManager.getLogger(SnapshotsRecoveryPlannerService.class); + private static final Logger logger = LogManager.getLogger(SnapshotsRecoveryPlannerService.class); private final ShardSnapshotsService shardSnapshotsService; private final BooleanSupplier isLicenseActive; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java 
b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index 89b0c1c6ef922..892e251285f14 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -108,7 +108,7 @@ private static Builder builder(FieldMapper in) { return ((GeoShapeWithDocValuesFieldMapper) in).builder; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { final Parameter indexed = Parameter.indexParam(m -> builder(m).indexed.get(), true); final Parameter stored = Parameter.storeParam(m -> builder(m).stored.get(), false); @@ -125,7 +125,6 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion version; private final GeoFormatterFactory geoFormatterFactory; - @SuppressWarnings("this-escape") public Builder( String name, IndexVersion version, @@ -145,7 +144,7 @@ public Builder( } // for testing - protected Builder setStored(boolean stored) { + Builder setStored(boolean stored) { this.stored.setValue(stored); return this; } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java index 8bf939aeae494..534c08f39c7e6 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Map; -public class 
GeoHexGridAggregationBuilder extends GeoGridAggregationBuilder { +public final class GeoHexGridAggregationBuilder extends GeoGridAggregationBuilder { public static final String NAME = "geohex_grid"; private static final int DEFAULT_PRECISION = 5; private static final int DEFAULT_MAX_NUM_CELLS = 10000; @@ -51,7 +51,6 @@ static int parsePrecision(XContentParser parser) throws IOException, Elasticsear return XContentMapValues.nodeIntegerValue(node); } - @SuppressWarnings("this-escape") public GeoHexGridAggregationBuilder(String name) { super(name); precision(DEFAULT_PRECISION); diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java index 76f84541e5bb9..ab63913760fea 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java @@ -27,10 +27,8 @@ public void testDefaultNoLenient() throws IOException { index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); assertThat( command("SELECT * FROM test"), - containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + containsString("[?1l>[?1000l[?2004l[31;1mBad request [[3;33;22mArrays (returned by [tags]) are not supported[23;31;1m][0m") ); - while ("][23;31;1m][0m".equals(readLine()) == false) - ; // clean console to avoid failures on shutdown } public void testExplicitNoLenient() throws IOException { @@ -38,9 +36,7 @@ public void testExplicitNoLenient() throws IOException { assertEquals("[?1l>[?1000l[?2004llenient set to [90mfalse[0m", command("lenient = false")); assertThat( command("SELECT * FROM test"), - containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + containsString("[?1l>[?1000l[?2004l[31;1mBad 
request [[3;33;22mArrays (returned by [tags]) are not supported[23;31;1m][0m") ); - while ("][23;31;1m][0m".equals(readLine()) == false) - ; // clean console to avoid failures on shutdown } } diff --git a/x-pack/plugin/sql/sql-client/build.gradle b/x-pack/plugin/sql/sql-client/build.gradle index ac6600b09427e..4a20e00666ea4 100644 --- a/x-pack/plugin/sql/sql-client/build.gradle +++ b/x-pack/plugin/sql/sql-client/build.gradle @@ -23,8 +23,11 @@ tasks.named('forbiddenApisMain').configure { } tasks.named('forbiddenApisTest').configure { - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' + bundledSignatures + } } tasks.named("forbiddenPatterns").configure { diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java index b41c5864a28b0..0fe7d15d19b53 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java @@ -345,6 +345,7 @@ public static SqlExceptionType fromRemoteFailureType(String type) { case "analysis_exception": case "resource_not_found_exception": case "verification_exception": + case "invalid_argument_exception": return DATA; case "planning_exception": case "mapping_exception": diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/RequestInfo.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/RequestInfo.java index 52e14ca1fd2be..2789ecdbf1204 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/RequestInfo.java +++ 
b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/RequestInfo.java @@ -13,7 +13,7 @@ import java.util.Objects; import java.util.Set; -public class RequestInfo { +public final class RequestInfo { private static final String CANVAS = "canvas"; public static final String ODBC_32 = "odbc32"; private static final String ODBC_64 = "odbc64"; @@ -46,14 +46,12 @@ public RequestInfo(Mode mode, String clientId) { this(mode, clientId, null); } - @SuppressWarnings("this-escape") public RequestInfo(Mode mode, String clientId, String version) { mode(mode); clientId(clientId); version(version); } - @SuppressWarnings("this-escape") public RequestInfo(Mode mode, SqlVersion version) { mode(mode); this.version = version; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlClientException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlClientException.java index 22d6118a5d483..7d105c4b4132e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlClientException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlClientException.java @@ -8,16 +8,12 @@ import org.elasticsearch.xpack.ql.QlClientException; -public abstract class SqlClientException extends QlClientException { +public class SqlClientException extends QlClientException { protected SqlClientException(String message, Object... args) { super(message, args); } - protected SqlClientException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } - protected SqlClientException(String message, Throwable cause) { super(message, cause); } @@ -26,7 +22,4 @@ protected SqlClientException(Throwable cause, String message, Object... 
args) { super(cause, message, args); } - protected SqlClientException(Throwable cause) { - super(cause); - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index c0bf3efa74f13..2debdccc7c999 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -78,7 +78,7 @@ import static org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.resolveFunction; import static org.elasticsearch.xpack.ql.util.CollectionUtils.combine; -public class Analyzer extends ParameterizedRuleExecutor { +public final class Analyzer extends ParameterizedRuleExecutor { private static final Iterable> rules; @@ -114,7 +114,6 @@ public class Analyzer extends ParameterizedRuleExecutor sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/SqlFunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/SqlFunctionRegistry.java index e0c4cc23919bd..73f936a960a5b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/SqlFunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/SqlFunctionRegistry.java @@ -139,9 +139,8 @@ import static java.util.Collections.unmodifiableList; -public class SqlFunctionRegistry extends FunctionRegistry { +public final class SqlFunctionRegistry extends FunctionRegistry { - @SuppressWarnings("this-escape") public SqlFunctionRegistry() { register(functions()); } diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessor.java index 54bdbeb0b2441..abe374bc3f2a8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateAdd.Part; @@ -52,9 +53,9 @@ public static Object process(Object unit, Object numberOfUnits, Object timestamp if (datePartField == null) { List similar = Part.findSimilar((String) unit); if (similar.isEmpty()) { - throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), unit); + throw new InvalidArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), unit); } else { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Received value [{}] is not valid date part to add; " + "did you mean {}?", unit, similar diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiff.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiff.java index 2ceca156af458..ee6063ba466f5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiff.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiff.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.Nullability; @@ -14,7 +15,6 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import java.time.ZoneId; import java.time.ZonedDateTime; @@ -114,7 +114,7 @@ private static long diffInSeconds(ZonedDateTime start, ZonedDateTime end) { private static int safeInt(long diff) { if (diff > Integer.MAX_VALUE || diff < Integer.MIN_VALUE) { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "The DATE_DIFF function resulted in an overflow; the number of units " + "separating two date/datetime instances is too large. Try to use DATE_DIFF with a less precise unit." 
); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessor.java index 225bcecacd183..f41c06d13cdf3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateDiff.Part; @@ -52,9 +53,9 @@ public static Object process(Object unit, Object startTimestamp, Object endTimes if (datePartField == null) { List similar = Part.findSimilar((String) unit); if (similar.isEmpty()) { - throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), unit); + throw new InvalidArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), unit); } else { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Received value [{}] is not valid date part to add; " + "did you mean {}?", unit, similar diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java index c9ef6431369e4..c37e69c132d39 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePart.Part; @@ -52,9 +53,9 @@ public static Object process(Object part, Object timestamp, ZoneId zoneId) { if (datePartField == null) { List similar = Part.findSimilar((String) part); if (similar.isEmpty()) { - throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), part); + throw new InvalidArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), part); } else { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Received value [{}] is not valid date part for extraction; " + "did you mean {}?", part, similar diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessor.java index 72125968c2742..91a96678189f5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessor.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; 
+import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; @@ -224,7 +225,7 @@ public Object format(Object timestamp, Object pattern, ZoneId zoneId) { try { return formatterFor(patternString).apply(ta); } catch (IllegalArgumentException | DateTimeException e) { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Invalid pattern [{}] is received for formatting date/time [{}]; {}", pattern, timestamp, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessor.java index 2b9801e513881..08fc1d621b508 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessor.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -72,7 +73,7 @@ public Object parse(Object timestamp, Object pattern, ZoneId zoneId) { if (msg.contains("Unable to convert parsed text using any of the specified queries")) { msg = format(null, "Unable to convert parsed text into [{}]", this.parseType); } - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Invalid {} string [{}] or pattern [{}] is received; {}", this.parseType, timestamp, diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java index 03c0e88852964..3ea22f182e355 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.literal.interval.IntervalDayTime; @@ -55,13 +56,9 @@ public static Object process(Object truncateTo, Object timestamp, ZoneId zoneId) if (truncateDateField == null) { List similar = Part.findSimilar((String) truncateTo); if (similar.isEmpty()) { - throw new SqlIllegalArgumentException( - "A value of {} or their aliases is required; received [{}]", - Part.values(), - truncateTo - ); + throw new InvalidArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), truncateTo); } else { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Received value [{}] is not valid date part for truncation; " + "did you mean {}?", truncateTo, similar @@ -72,10 +69,10 @@ public static Object process(Object truncateTo, Object timestamp, ZoneId zoneId) if (timestamp instanceof ZonedDateTime == false && timestamp instanceof IntervalYearMonth == false && timestamp instanceof IntervalDayTime == false) { - throw new SqlIllegalArgumentException("A date/datetime/interval is required; received [{}]", timestamp); + throw new 
SqlIllegalArgumentException("A date/datetime/interval is required; received [{}]", timestamp); // verifier checked } if (truncateDateField == Part.WEEK && (timestamp instanceof IntervalDayTime || timestamp instanceof IntervalYearMonth)) { - throw new SqlIllegalArgumentException("Truncating intervals is not supported for {} units", truncateTo); + throw new InvalidArgumentException("Truncating intervals is not supported for {} units", truncateTo); } if (timestamp instanceof ZonedDateTime) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java index c56feacdb9d0c..37a801d7ec3b5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.literal.geo.GeoShape; @@ -42,7 +43,7 @@ public static GeoShape apply(Object input) { try { return new GeoShape(input); } catch (IOException | IllegalArgumentException | ElasticsearchParseException ex) { - throw new SqlIllegalArgumentException("Cannot parse [{}] as a geo_shape value", input); + throw new InvalidArgumentException("Cannot parse [{}] as a geo_shape value", input); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java index dee8cdc870e0e..5bec38a459c2c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.ql.type.DataTypeConverter; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; @@ -38,14 +38,14 @@ public enum MathOperation { long lo = ((Number) l).longValue(); if (lo == Long.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + lo + "] cannot be negated since the result is outside the range"); + throw new InvalidArgumentException("[" + lo + "] cannot be negated since the result is outside the range"); } lo = lo < 0 ? 
-lo : lo; if (l instanceof Integer) { if ((int) lo == Integer.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + lo + "] cannot be negated since the result is outside the range"); + throw new InvalidArgumentException("[" + lo + "] cannot be negated since the result is outside the range"); } return DataTypeConverter.safeToInt(lo); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/geo/GeoShape.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/geo/GeoShape.java index 386686f4f17d7..6e5070a409035 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/geo/GeoShape.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/geo/GeoShape.java @@ -30,6 +30,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantNamedWriteable; @@ -60,7 +61,7 @@ public GeoShape(Object value) throws IOException { try { shape = parse(value); } catch (ParseException ex) { - throw new QlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); + throw new InvalidArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); } } @@ -69,7 +70,7 @@ public GeoShape(StreamInput in) throws IOException { try { shape = parse(value); } catch (ParseException ex) { - throw new QlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); + throw new InvalidArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 848939662a4f0..67756d4951d6c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Tuple; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expression; @@ -706,7 +707,7 @@ public Literal visitDecimalLiteral(DecimalLiteralContext ctx) { try { return new Literal(tuple.v1(), Double.valueOf(StringUtils.parseDouble(tuple.v2())), DataTypes.DOUBLE); - } catch (QlIllegalArgumentException siae) { + } catch (InvalidArgumentException siae) { throw new ParsingException(tuple.v1(), siae.getMessage()); } } @@ -717,7 +718,7 @@ public Literal visitIntegerLiteral(IntegerLiteralContext ctx) { try { Number value = StringUtils.parseIntegral(tuple.v2()); return new Literal(tuple.v1(), value, DataTypes.fromJava(value)); - } catch (QlIllegalArgumentException siae) { + } catch (InvalidArgumentException siae) { throw new ParsingException(tuple.v1(), siae.getMessage()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java index 3462a0ed411c4..a553d3f64f27f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.parser; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.ql.tree.Source; import 
org.elasticsearch.xpack.sql.SqlClientException; @@ -50,11 +49,6 @@ public String getErrorMessage() { return super.getMessage(); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - @Override public String getMessage() { return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java index b289d42b24de6..76c69d215ae24 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java @@ -29,7 +29,7 @@ import static java.util.Collections.singletonList; -public class Pivot extends UnaryPlan { +public final class Pivot extends UnaryPlan { private final Expression column; private final List values; @@ -45,7 +45,6 @@ public Pivot(Source source, LogicalPlan child, Expression column, List sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 90a4970fde9df..e39420d1fbefb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -62,7 +62,7 @@ import static org.elasticsearch.xpack.sql.plugin.Transports.username; import static org.elasticsearch.xpack.sql.proto.Mode.CLI; -public class TransportSqlQueryAction extends HandledTransportAction +public final class TransportSqlQueryAction extends HandledTransportAction implements AsyncTaskManagementService.AsyncOperation { @@ -74,7 +74,6 @@ 
public class TransportSqlQueryAction extends HandledTransportAction asyncTaskManagementService; - @SuppressWarnings("this-escape") @Inject public TransportSqlQueryAction( Settings settings, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/Check.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/Check.java index 38f13dbe974f6..2e5b2d6a657c2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/Check.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/Check.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.sql.util; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; /** @@ -26,12 +27,6 @@ public static void isTrue(boolean expression, String message) { } } - public static void notNull(Object object, String message) { - if (object == null) { - throw new SqlIllegalArgumentException(message); - } - } - public static void notNull(Object object, String message, Object... values) { if (object == null) { throw new SqlIllegalArgumentException(message, values); @@ -40,7 +35,7 @@ public static void notNull(Object object, String message, Object... 
values) { public static void isFixedNumberAndInRange(Object object, String objectName, Long from, Long to) { if ((object instanceof Number) == false || object instanceof Float || object instanceof Double) { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "A fixed point number is required for [{}]; received [{}]", objectName, object.getClass().getTypeName() @@ -48,7 +43,7 @@ public static void isFixedNumberAndInRange(Object object, String objectName, Lon } Long longValue = ((Number) object).longValue(); if (longValue < from || longValue > to) { - throw new SqlIllegalArgumentException("[{}] out of the allowed range [{}, {}], received [{}]", objectName, from, to, longValue); + throw new InvalidArgumentException("[{}] out of the allowed range [{}, {}], received [{}]", objectName, from, to, longValue); } } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index aca64a467934d..5c3fc378d90c1 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.sql.AbstractSqlWireSerializingTestCase; import org.elasticsearch.xpack.sql.expression.literal.geo.GeoShape; import org.elasticsearch.xpack.sql.proto.StringUtils; @@ -146,7 +146,7 @@ public void testMultiValuedDocValue() { DocumentField field = new DocumentField(fieldName, asList("a", "b")); SearchHit hit = new 
SearchHit(1, null); hit.setDocumentField(fieldName, field); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); + Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); } @@ -165,7 +165,7 @@ public void testMultiValuedSource() { DocumentField field = new DocumentField("a", asList(value, value)); SearchHit hit = new SearchHit(1, null); hit.setDocumentField("a", field); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); + Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [a]) are not supported")); } @@ -207,7 +207,7 @@ public void testMultipleGeoShapeExtraction() { SearchHit hit = new SearchHit(1, null); hit.setDocumentField(fieldName, field); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); + Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, randomBoolean() ? 
GEO_SHAPE : SHAPE, UTC, LENIENT); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessorTests.java index b8fa3e4def773..e8014903699c9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessorTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.type.DataTypeConverter.DefaultConverter; import org.elasticsearch.xpack.sql.type.SqlDataTypeConverter.SqlConverter; @@ -38,7 +38,7 @@ public void testApply() { CastProcessor proc = new CastProcessor(DefaultConverter.STRING_TO_INT); assertEquals(null, proc.process(null)); assertEquals(1, proc.process("1")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> proc.process("1.2")); + Exception e = expectThrows(InvalidArgumentException.class, () -> proc.process("1.2")); assertEquals("cannot cast [1.2] to [integer]", e.getMessage()); } { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessorTests.java index 98cfda78b0fff..98300eae130db 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessorTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -62,28 +63,28 @@ protected DateAddProcessor mutateInstance(DateAddProcessor instance) { } public void testInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateAdd(Source.EMPTY, l(5), l(10), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateAdd(Source.EMPTY, l("days"), l("foo"), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("A number is required; received [foo]", siae.getMessage()); + assertEquals("A number is required; received [foo]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateAdd(Source.EMPTY, l("days"), l(10), l("foo"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime is required; received [foo]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateAdd(Source.EMPTY, l("invalid"), l(10), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) @@ -91,16 +92,16 @@ public void 
testInvalidInputs() { assertEquals( "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, " + "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateAdd(Source.EMPTY, l("quertar"), l(10), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("Received value [quertar] is not valid date part to add; did you mean [quarter, quarters]?", siae.getMessage()); + assertEquals("Received value [quertar] is not valid date part to add; did you mean [quarter, quarters]?", e.getMessage()); } public void testWithNulls() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessorTests.java index c15d00250b237..b4d760d4cae71 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -60,32 +61,32 @@ protected DateDiffProcessor mutateInstance(DateDiffProcessor instance) { } public void testInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateDiff(Source.EMPTY, l(5), 
randomDatetimeLiteral(), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateDiff(Source.EMPTY, l("days"), l("foo"), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("A date/datetime is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime is required; received [foo]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateDiff(Source.EMPTY, l("days"), randomDatetimeLiteral(), l("foo"), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("A date/datetime is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime is required; received [foo]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) @@ -93,16 +94,16 @@ public void testInvalidInputs() { assertEquals( "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, " + "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("quertar"), randomDatetimeLiteral(), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("Received value [quertar] is not valid date part to add; did you mean [quarter, quarters]?", siae.getMessage()); + assertEquals("Received value [quertar] is 
not valid date part to add; did you mean [quarter, quarters]?", e.getMessage()); } public void testWithNulls() { @@ -305,114 +306,114 @@ public void testOverflow() { Literal dt1 = l(dateTime(-99992022, 12, 31, 20, 22, 33, 123456789, ZoneId.of("Etc/GMT-5"))); Literal dt2 = l(dateTime(99992022, 4, 18, 8, 33, 22, 987654321, ZoneId.of("Etc/GMT+5"))); - SqlIllegalArgumentException siae = expectThrows( - SqlIllegalArgumentException.class, + Exception e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("month"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("dayofyear"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("day"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. 
Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("week"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("weekday"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("hours"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("minute"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. 
Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("second"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("milliseconds"), dt2, dt1, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("mcs"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("nanoseconds"), dt2, dt1, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. 
Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java index 436c49c589af2..615275f60bece 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -57,33 +58,33 @@ protected DatePartProcessor mutateInstance(DatePartProcessor instance) { } public void testInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DatePart(Source.EMPTY, l(5), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DatePart(Source.EMPTY, l("days"), l("foo"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime is required; received [foo]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new 
DatePart(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, " + "MICROSECOND, NANOSECOND, TZOFFSET] or their aliases is required; received [invalid]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DatePart(Source.EMPTY, l("dayfyear"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Received value [dayfyear] is not valid date part for extraction; did you mean [dayofyear, year]?", siae.getMessage()); + assertEquals("Received value [dayfyear] is not valid date part for extraction; did you mean [dayofyear, year]?", e.getMessage()); } public void testWithNulls() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessorTests.java index 2c83f666d6f37..997447c525e43 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -70,61 +71,61 @@ protected DateTimeFormatProcessor mutateInstance(DateTimeFormatProcessor instanc } public void 
testDateTimeFormatInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTimeFormat(Source.EMPTY, l("foo"), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime/time is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime/time is required; received [foo]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTimeFormat(Source.EMPTY, randomDatetimeLiteral(), l(5), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeFormat(Source.EMPTY, l(dateTime(2019, 9, 3, 18, 10, 37, 0)), l("invalid"), randomZone()).makePipe() .asProcessor() .process(null) ); assertEquals( "Invalid pattern [invalid] is received for formatting date/time [2019-09-03T18:10:37Z]; Unknown pattern letter: i", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeFormat(Source.EMPTY, l(time(18, 10, 37, 123000000)), l("MM/dd"), randomZone()).makePipe() .asProcessor() .process(null) ); assertEquals( "Invalid pattern [MM/dd] is received for formatting date/time [18:10:37.123Z]; Unsupported field: MonthOfYear", - siae.getMessage() + e.getMessage() ); } public void testFormatInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Format(Source.EMPTY, l("foo"), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime/time is required; received [foo]", 
siae.getMessage()); + assertEquals("A date/datetime/time is required; received [foo]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new Format(Source.EMPTY, randomDatetimeLiteral(), l(5), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Format(Source.EMPTY, l(time(18, 10, 37, 123000000)), l("MM/dd"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid pattern [MM/dd] is received for formatting date/time [18:10:37.123Z]; Unsupported field: MonthOfYear", - siae.getMessage() + e.getMessage() ); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessorTests.java index ac93ee69fa8a0..1bac217ef8ac5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.sql.AbstractSqlWireSerializingTestCase; @@ -63,44 +64,44 @@ protected DateTimeParseProcessor mutateInstance(DateTimeParseProcessor instance) } public void testDateTimeInvalidInputs() { - SqlIllegalArgumentException siae = 
expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l(10), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [10]", siae.getMessage()); + assertEquals("A string is required; received [10]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTimeParse(Source.EMPTY, randomStringLiteral(), l(20), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [20]", siae.getMessage()); + assertEquals("A string is required; received [20]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l("2020-04-07"), l("invalid"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Invalid datetime string [2020-04-07] or pattern [invalid] is received; Unknown pattern letter: i", siae.getMessage()); + assertEquals("Invalid datetime string [2020-04-07] or pattern [invalid] is received; Unknown pattern letter: i", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l("2020-04-07"), l("MM/dd"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid datetime string [2020-04-07] or pattern [MM/dd] is received; Text '2020-04-07' could not be parsed at index 2", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l("07/05/2020"), l("dd/MM/uuuu"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid datetime string [07/05/2020] or pattern [dd/MM/uuuu] is received; Unable to convert parsed text into [datetime]", - 
siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l("10:20:30.123456789"), l("HH:mm:ss.SSSSSSSSS"), randomZone()).makePipe() .asProcessor() .process(null) @@ -108,96 +109,96 @@ public void testDateTimeInvalidInputs() { assertEquals( "Invalid datetime string [10:20:30.123456789] or pattern [HH:mm:ss.SSSSSSSSS] is received; " + "Unable to convert parsed text into [datetime]", - siae.getMessage() + e.getMessage() ); } public void testTimeInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new TimeParse(Source.EMPTY, l(10), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [10]", siae.getMessage()); + assertEquals("A string is required; received [10]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new TimeParse(Source.EMPTY, randomStringLiteral(), l(20), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [20]", siae.getMessage()); + assertEquals("A string is required; received [20]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new TimeParse(Source.EMPTY, l("11:04:07"), l("invalid"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Invalid time string [11:04:07] or pattern [invalid] is received; Unknown pattern letter: i", siae.getMessage()); + assertEquals("Invalid time string [11:04:07] or pattern [invalid] is received; Unknown pattern letter: i", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new TimeParse(Source.EMPTY, l("11:04:07"), l("HH:mm"), 
randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid time string [11:04:07] or pattern [HH:mm] is received; " + "Text '11:04:07' could not be parsed, unparsed text found at index 5", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new TimeParse(Source.EMPTY, l("07/05/2020"), l("dd/MM/uuuu"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid time string [07/05/2020] or pattern [dd/MM/uuuu] is received; Unable to convert parsed text into [time]", - siae.getMessage() + e.getMessage() ); } public void testDateInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateParse(Source.EMPTY, l(10), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [10]", siae.getMessage()); + assertEquals("A string is required; received [10]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateParse(Source.EMPTY, randomStringLiteral(), l(20), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [20]", siae.getMessage()); + assertEquals("A string is required; received [20]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateParse(Source.EMPTY, l("07/05/2020"), l("invalid"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Invalid date string [07/05/2020] or pattern [invalid] is received; Unknown pattern letter: i", siae.getMessage()); + assertEquals("Invalid date string [07/05/2020] or pattern [invalid] is received; Unknown pattern letter: i", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + 
InvalidArgumentException.class, () -> new DateParse(Source.EMPTY, l("07/05/2020"), l("dd/MM"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid date string [07/05/2020] or pattern [dd/MM] is received; " + "Text '07/05/2020' could not be parsed, unparsed text found at index 5", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateParse(Source.EMPTY, l("11:04:07"), l("HH:mm:ss"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid date string [11:04:07] or pattern [HH:mm:ss] is received; Unable to convert parsed text into [date]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateParse(Source.EMPTY, l("05/2020 11:04:07"), l("MM/uuuu HH:mm:ss"), randomZone()).makePipe() .asProcessor() .process(null) ); assertEquals( "Invalid date string [05/2020 11:04:07] or pattern [MM/uuuu HH:mm:ss] is received; Unable to convert parsed text into [date]", - siae.getMessage() + e.getMessage() ); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java index 3ddddb4392d3f..f49c7b76d0d8f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import 
org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -76,45 +77,45 @@ public void testInvalidInputs() { TemporalAmount duration = Duration.ofDays(42).plusHours(12).plusMinutes(23).plusSeconds(12).plusNanos(143000000); Literal dayToSecond = intervalLiteral(duration, INTERVAL_DAY_TO_SECOND); - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTrunc(Source.EMPTY, l(5), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("days"), l("foo"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime/interval is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime/interval is required; received [foo]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "A value of [MILLENNIUM, CENTURY, DECADE, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, " + "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("dacede"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Received value [dacede] is not valid date part for truncation; did you mean [decade, decades]?", siae.getMessage()); + assertEquals("Received 
value [dacede] is not valid date part for truncation; did you mean [decade, decades]?", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("weeks"), yearToMonth, null).makePipe().asProcessor().process(null) ); - assertEquals("Truncating intervals is not supported for weeks units", siae.getMessage()); + assertEquals("Truncating intervals is not supported for weeks units", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("week"), dayToSecond, null).makePipe().asProcessor().process(null) ); - assertEquals("Truncating intervals is not supported for week units", siae.getMessage()); + assertEquals("Truncating intervals is not supported for week units", e.getMessage()); } public void testWithNulls() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java index 5eb87ae736e88..d54580098fa3c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.geo; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.literal.geo.GeoShape; @@ -28,17 +29,17 @@ public void testApply() { public void testTypeCheck() { StWkttosqlProcessor procPoint = new StWkttosqlProcessor(); - QlIllegalArgumentException siae = 
expectThrows(QlIllegalArgumentException.class, () -> procPoint.process(42)); - assertEquals("A string is required; received [42]", siae.getMessage()); + Exception e = expectThrows(QlIllegalArgumentException.class, () -> procPoint.process(42)); + assertEquals("A string is required; received [42]", e.getMessage()); - siae = expectThrows(QlIllegalArgumentException.class, () -> procPoint.process("some random string")); - assertEquals("Cannot parse [some random string] as a geo_shape value", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> procPoint.process("some random string")); + assertEquals("Cannot parse [some random string] as a geo_shape value", e.getMessage()); - siae = expectThrows(QlIllegalArgumentException.class, () -> procPoint.process("point (foo bar)")); - assertEquals("Cannot parse [point (foo bar)] as a geo_shape or shape value", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> procPoint.process("point (foo bar)")); + assertEquals("Cannot parse [point (foo bar)] as a geo_shape or shape value", e.getMessage()); - siae = expectThrows(QlIllegalArgumentException.class, () -> procPoint.process("point (10 10")); - assertEquals("Cannot parse [point (10 10] as a geo_shape or shape value", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> procPoint.process("point (10 10")); + assertEquals("Cannot parse [point (10 10] as a geo_shape or shape value", e.getMessage()); } public void testCoerce() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathOperationTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathOperationTests.java index 3049567d66905..e1e236413d58e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathOperationTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathOperationTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; import java.util.Arrays; @@ -17,17 +17,17 @@ public class MathOperationTests extends ESTestCase { public void testAbsLongMax() { - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> MathOperation.ABS.apply(Long.MIN_VALUE)); + InvalidArgumentException ex = expectThrows(InvalidArgumentException.class, () -> MathOperation.ABS.apply(Long.MIN_VALUE)); assertTrue(ex.getMessage().contains("cannot be negated")); } public void testAbsIntegerMax() { - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> MathOperation.ABS.apply(Integer.MIN_VALUE)); + InvalidArgumentException ex = expectThrows(InvalidArgumentException.class, () -> MathOperation.ABS.apply(Integer.MIN_VALUE)); assertTrue(ex.getMessage().contains("cannot be negated")); } public void testAbsShortMax() { - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> MathOperation.ABS.apply(Short.MIN_VALUE)); + Exception ex = expectThrows(InvalidArgumentException.class, () -> MathOperation.ABS.apply(Short.MIN_VALUE)); assertTrue(ex.getMessage().contains("out of")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java index c0e436cc48900..e4fa7dab1db9c 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -62,35 +63,32 @@ public void testLeftFunctionWithEdgeCases() { } public void testLeftFunctionInputValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Left(EMPTY, l(5), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Left(EMPTY, l("foo bar"), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [count]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [count]; received [java.lang.String]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Left(EMPTY, l("foo"), l((long) Integer.MIN_VALUE - 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", siae.getMessage()); + 
assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Left(EMPTY, l("foo"), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, - () -> new Left(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null) - ); - assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> new Left(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null)); + assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", e.getMessage()); } public void testRightFunctionWithValidInput() { @@ -111,35 +109,32 @@ public void testRightFunctionWithEdgeCases() { } public void testRightFunctionInputValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Right(EMPTY, l(5), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Right(EMPTY, l("foo bar"), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [count]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [count]; received [java.lang.String]", 
e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Right(EMPTY, l("foo"), l((long) Integer.MIN_VALUE - 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Right(EMPTY, l("foo"), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, - () -> new Right(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null) - ); - assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> new Right(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null)); + assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", e.getMessage()); } public void testRepeatFunctionWithValidInput() { @@ -158,34 +153,31 @@ public void testRepeatFunctionWithEdgeCases() { } public void testRepeatFunctionInputsValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Repeat(EMPTY, l(5), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( - 
SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Repeat(EMPTY, l("foo bar"), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [count]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [count]; received [java.lang.String]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Repeat(EMPTY, l("foo"), l((long) Integer.MIN_VALUE - 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Repeat(EMPTY, l("foo"), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, - () -> new Repeat(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null) - ); - assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> new Repeat(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null)); + assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", e.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java index b938876fc9817..46beb99eb9a70 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -68,59 +69,59 @@ public void testInsertWithEdgeCases() { } public void testInsertInputsValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Insert(EMPTY, l(5), l(1), l(3), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l(1), l(3), l(66)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [66]", siae.getMessage()); + assertEquals("A string/char is required; received [66]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l("c"), l(3), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [start]; received 
[java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [start]; received [java.lang.String]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l(1), l('z'), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [length]; received [java.lang.Character]", siae.getMessage()); + assertEquals("A fixed point number is required for [length]; received [java.lang.Character]", e.getMessage()); assertEquals( "baroobar", new Insert(EMPTY, l("foobar"), l(Integer.MIN_VALUE + 1), l(1), l("bar")).makePipe().asProcessor().process(null) ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobarbar"), l(Integer.MIN_VALUE), l(1), l("bar")).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", e.getMessage()); assertEquals("foobar", new Insert(EMPTY, l("foobar"), l(Integer.MAX_VALUE), l(1), l("bar")).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l((long) Integer.MAX_VALUE + 1), l(1), l("bar")).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", e.getMessage()); assertEquals("barfoobar", new Insert(EMPTY, l("foobar"), l(1), l(0), l("bar")).makePipe().asProcessor().process(null)); - siae = expectThrows( - 
SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l(1), l(-1), l("bar")).makePipe().asProcessor().process(null) ); - assertEquals("[length] out of the allowed range [0, 2147483647], received [-1]", siae.getMessage()); + assertEquals("[length] out of the allowed range [0, 2147483647], received [-1]", e.getMessage()); assertEquals("bar", new Insert(EMPTY, l("foobar"), l(1), l(Integer.MAX_VALUE), l("bar")).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l(1), l((long) Integer.MAX_VALUE + 1), l("bar")).makePipe().asProcessor().process(null) ); - assertEquals("[length] out of the allowed range [0, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[length] out of the allowed range [0, 2147483647], received [2147483648]", e.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java index 3b8098978f01f..66124d2b1dccf 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import 
org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -69,36 +70,36 @@ public void testLocateFunctionWithEdgeCasesInputs() { } public void testLocateFunctionValidatingInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Locate(EMPTY, l(5), l("foobarbar"), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new Locate(EMPTY, l("foo"), l(1), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [1]", siae.getMessage()); + assertEquals("A string/char is required; received [1]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Locate(EMPTY, l("foobarbar"), l("bar"), l('c')).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [start]; received [java.lang.Character]", siae.getMessage()); + assertEquals("A fixed point number is required for [start]; received [java.lang.Character]", e.getMessage()); assertEquals(4, new Locate(EMPTY, l("bar"), l("foobarbar"), l(Integer.MIN_VALUE + 1)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Locate(EMPTY, l("bar"), l("foobarbar"), l(Integer.MIN_VALUE)).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", e.getMessage()); assertEquals(0, new Locate(EMPTY, l("bar"), l("foobarbar"), 
l(Integer.MAX_VALUE)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Locate(EMPTY, l("bar"), l("foobarbar"), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", e.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java index 2ce90e121b0e7..3e98afb36fd9c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -64,50 +65,50 @@ public void testSubstringFunctionWithEdgeCases() { } public void testSubstringFunctionInputsValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Substring(EMPTY, l(5), l(1), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + 
assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l(1), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [length]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [length]; received [java.lang.String]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l("bar"), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [start]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [start]; received [java.lang.String]", e.getMessage()); assertEquals("f", new Substring(EMPTY, l("foobarbar"), l(Integer.MIN_VALUE + 1), l(1)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l(Integer.MIN_VALUE), l(1)).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", e.getMessage()); assertEquals("", new Substring(EMPTY, l("foobarbar"), l(Integer.MAX_VALUE), l(1)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l((long) Integer.MAX_VALUE + 1), l(1)).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received 
[2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", e.getMessage()); assertEquals("", new Substring(EMPTY, l("foobarbar"), l(1), l(0)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l(1), l(-1)).makePipe().asProcessor().process(null) ); - assertEquals("[length] out of the allowed range [0, 2147483647], received [-1]", siae.getMessage()); + assertEquals("[length] out of the allowed range [0, 2147483647], received [-1]", e.getMessage()); assertEquals("foobarbar", new Substring(EMPTY, l("foobarbar"), l(1), l(Integer.MAX_VALUE)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l(1), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[length] out of the allowed range [0, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[length] out of the allowed range [0, 2147483647], received [2147483648]", e.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/SqlBinaryArithmeticTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/SqlBinaryArithmeticTests.java index 09f814da8e5ef..00c13bbdd801a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/SqlBinaryArithmeticTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/SqlBinaryArithmeticTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.test.ESTestCase; 
+import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.type.DataType; @@ -24,7 +25,6 @@ import static org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Arithmetics.mod; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; -import static org.elasticsearch.xpack.ql.util.NumericUtils.UNSIGNED_LONG_MAX; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.INTERVAL_DAY; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.INTERVAL_DAY_TO_HOUR; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.INTERVAL_HOUR; @@ -252,13 +252,13 @@ public void testMulIntegerIntervalYearMonthOverflow() { public void testMulLongIntervalYearMonthOverflow() { Literal l = interval(Period.ofYears(1), INTERVAL_YEAR); - QlIllegalArgumentException expect = expectThrows(QlIllegalArgumentException.class, () -> mul(l, L(Long.MAX_VALUE))); + Exception expect = expectThrows(InvalidArgumentException.class, () -> mul(l, L(Long.MAX_VALUE))); assertEquals("[9223372036854775807] out of [integer] range", expect.getMessage()); } public void testMulUnsignedLongIntervalYearMonthOverflow() { Literal l = interval(Period.ofYears(1), INTERVAL_YEAR); - QlIllegalArgumentException expect = expectThrows(QlIllegalArgumentException.class, () -> mul(l, L(UNSIGNED_LONG_MAX))); + Exception expect = expectThrows(InvalidArgumentException.class, () -> mul(l, L(UNSIGNED_LONG_MAX))); assertEquals("[18446744073709551615] out of [long] range", expect.getMessage()); } @@ -270,7 +270,7 @@ public void testMulLongIntervalDayTimeOverflow() { public void testMulUnsignedLongIntervalDayTimeOverflow() { Literal l = interval(Duration.ofDays(1), INTERVAL_DAY); - QlIllegalArgumentException expect = expectThrows(QlIllegalArgumentException.class, () -> mul(l, L(UNSIGNED_LONG_MAX))); + Exception expect = 
expectThrows(InvalidArgumentException.class, () -> mul(l, L(UNSIGNED_LONG_MAX))); assertEquals("[18446744073709551615] out of [long] range", expect.getMessage()); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 32cfdc158d24d..7bfb9b2e7a9f8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.optimizer; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expression.TypeResolution; @@ -349,11 +349,8 @@ public void testNullFoldingIsNullWithCast() { final IsNull isNullOpt = (IsNull) foldNull.rule(isNull); assertEquals(isNull, isNullOpt); - QlIllegalArgumentException sqlIAE = expectThrows( - QlIllegalArgumentException.class, - () -> isNullOpt.asPipe().asProcessor().process(null) - ); - assertEquals("cannot cast [foo] to [date]: Text 'foo' could not be parsed at index 0", sqlIAE.getMessage()); + Exception e = expectThrows(InvalidArgumentException.class, () -> isNullOpt.asPipe().asProcessor().process(null)); + assertEquals("cannot cast [foo] to [date]: Text 'foo' could not be parsed at index 0", e.getMessage()); isNull = new IsNull(EMPTY, new Cast(EMPTY, NULL, randomFrom(DataTypes.types()))); assertTrue((Boolean) ((IsNull) foldNull.rule(isNull)).asPipe().asProcessor().process(null)); @@ -377,11 +374,8 @@ public void testNullFoldingIsNotNullWithCast() { final IsNotNull isNotNullOpt = (IsNotNull) foldNull.rule(isNotNull); assertEquals(isNotNull, isNotNullOpt); - 
QlIllegalArgumentException sqlIAE = expectThrows( - QlIllegalArgumentException.class, - () -> isNotNullOpt.asPipe().asProcessor().process(null) - ); - assertEquals("cannot cast [foo] to [date]: Text 'foo' could not be parsed at index 0", sqlIAE.getMessage()); + Exception e = expectThrows(InvalidArgumentException.class, () -> isNotNullOpt.asPipe().asProcessor().process(null)); + assertEquals("cannot cast [foo] to [date]: Text 'foo' could not be parsed at index 0", e.getMessage()); isNotNull = new IsNotNull(EMPTY, new Cast(EMPTY, NULL, randomFrom(DataTypes.types()))); assertFalse((Boolean) ((IsNotNull) foldNull.rule(isNotNull)).asPipe().asProcessor().process(null)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/SqlDataTypeConverterTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/SqlDataTypeConverterTests.java index 405ddbf4779fb..80907230e828f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/SqlDataTypeConverterTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/SqlDataTypeConverterTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.type; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Location; import org.elasticsearch.xpack.ql.tree.Source; @@ -97,7 +97,7 @@ public void testConversionToLong() { assertEquals(10L, conversion.convert(10.0)); assertEquals(10L, conversion.convert(10.1)); assertEquals(11L, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -139,7 
+139,7 @@ public void testConversionToLong() { assertNull(conversion.convert(null)); assertEquals(1L, conversion.convert("1")); assertEquals(0L, conversion.convert("-0")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [long]", e.getMessage()); } } @@ -152,7 +152,7 @@ public void testConversionToDate() { assertEquals(date(10L), conversion.convert(10.0)); assertEquals(date(10L), conversion.convert(10.1)); assertEquals(date(11L), conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -162,7 +162,7 @@ public void testConversionToDate() { assertEquals(date(bi.longValue()), conversion.convert(bi)); BigInteger tooLarge = bi.add(BigInteger.valueOf(Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(tooLarge)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(tooLarge)); assertEquals("[" + tooLarge + "] out of [long] range", e.getMessage()); } { @@ -213,15 +213,15 @@ public void testConversionToDate() { Converter forward = converterFor(DATE, KEYWORD); Converter back = converterFor(KEYWORD, DATE); assertEquals(asDateOnly(zdt), back.convert(forward.convert(zdt))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [date]: Text '0xff' could not be parsed at index 0", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> 
conversion.convert("2020-02-")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("2020-02-")); assertEquals("cannot cast [2020-02-] to [date]: Text '2020-02-' could not be parsed at index 8", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("2020-")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("2020-")); assertEquals("cannot cast [2020-] to [date]: Text '2020-' could not be parsed at index 5", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("-2020-02-")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("-2020-02-")); assertEquals("cannot cast [-2020-02-] to [date]: Text '-2020-02-' could not be parsed at index 9", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("-2020-")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("-2020-")); assertEquals("cannot cast [-2020-] to [date]: Text '-2020-' could not be parsed at index 6", e.getMessage()); } } @@ -234,7 +234,7 @@ public void testConversionToTime() { assertEquals(time(10L), conversion.convert(10.0)); assertEquals(time(10L), conversion.convert(10.1)); assertEquals(time(11L), conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -244,7 +244,7 @@ public void testConversionToTime() { assertEquals(time(bi.longValue()), conversion.convert(bi)); BigInteger tooLarge = bi.add(BigInteger.valueOf(Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(tooLarge)); + Exception e = expectThrows(InvalidArgumentException.class, () -> 
conversion.convert(tooLarge)); assertEquals("[" + tooLarge + "] out of [long] range", e.getMessage()); } { @@ -285,7 +285,7 @@ public void testConversionToTime() { Converter forward = converterFor(TIME, KEYWORD); Converter back = converterFor(KEYWORD, TIME); assertEquals(ot, back.convert(forward.convert(ot))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [time]: Text '0xff' could not be parsed at index 0", e.getMessage()); } } @@ -298,7 +298,7 @@ public void testConversionToDateTime() { assertEquals(dateTime(10L), conversion.convert(10.0)); assertEquals(dateTime(10L), conversion.convert(10.1)); assertEquals(dateTime(11L), conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -349,7 +349,7 @@ public void testConversionToDateTime() { Converter forward = converterFor(DATETIME, KEYWORD); Converter back = converterFor(KEYWORD, DATETIME); assertEquals(dt, back.convert(forward.convert(dt))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [datetime]: Text '0xff' could not be parsed at index 0", e.getMessage()); } } @@ -403,7 +403,7 @@ public void testConversionToFloat() { assertEquals(1.0f, (float) conversion.convert("1"), 0); assertEquals(0.0f, (float) conversion.convert("-0"), 0); assertEquals(12.776f, (float) conversion.convert("12.776"), 0.00001); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> 
conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [float]", e.getMessage()); } } @@ -457,7 +457,7 @@ public void testConversionToDouble() { assertEquals(1.0, (double) conversion.convert("1"), 0); assertEquals(0.0, (double) conversion.convert("-0"), 0); assertEquals(12.776, (double) conversion.convert("12.776"), 0.00001); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [double]", e.getMessage()); } } @@ -522,17 +522,17 @@ public void testConversionToBoolean() { assertEquals(true, conversion.convert("True")); assertEquals(false, conversion.convert("fAlSe")); // Everything else should fail - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("10")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10")); assertEquals("cannot cast [10] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("-1")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("-1")); assertEquals("cannot cast [-1] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0")); assertEquals("cannot cast [0] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("blah")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("blah")); assertEquals("cannot cast [blah] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("Yes")); + e = expectThrows(InvalidArgumentException.class, () -> 
conversion.convert("Yes")); assertEquals("cannot cast [Yes] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("nO")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("nO")); assertEquals("cannot cast [nO] to [boolean]", e.getMessage()); } } @@ -548,7 +548,7 @@ public void testConversionToUnsignedLong() { assertEquals(BigInteger.valueOf(zdt.toEpochSecond() * 1000), conversion.convert(zdt)); ZonedDateTime zdtn = asDateOnly(-l); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(zdtn)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(zdtn)); assertEquals("[" + zdtn.toEpochSecond() * 1000 + "] out of [unsigned_long] range", e.getMessage()); } { @@ -569,7 +569,7 @@ public void testConversionToInt() { assertEquals(10, conversion.convert(10.0)); assertEquals(10, conversion.convert(10.1)); assertEquals(11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Long.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Long.MAX_VALUE)); assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage()); } { @@ -579,7 +579,7 @@ public void testConversionToInt() { assertEquals(86400000, conversion.convert(asDateOnly(123456789L))); assertEquals(172800000, conversion.convert(asDateOnly(223456789L))); assertEquals(-172800000, conversion.convert(asDateOnly(-123456789L))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateOnly(Long.MAX_VALUE))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateOnly(Long.MAX_VALUE))); assertEquals("[9223372036828800000] out of [integer] range", e.getMessage()); } { @@ -600,7 +600,7 @@ public void testConversionToInt() { // Nanos are ignored, only millis are used assertEquals(62123, 
conversion.convert(asDateTimeWithNanos("1970-01-01T00:01:02.123456789Z"))); Exception e = expectThrows( - QlIllegalArgumentException.class, + InvalidArgumentException.class, () -> conversion.convert(DateUtils.asDateTimeWithMillis(Long.MAX_VALUE)) ); assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage()); @@ -615,23 +615,23 @@ public void testConversionToShort() { assertEquals((short) 10, conversion.convert(10.0)); assertEquals((short) 10, conversion.convert(10.1)); assertEquals((short) 11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Integer.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Integer.MAX_VALUE)); assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage()); } { Converter conversion = converterFor(DATE, to); assertNull(conversion.convert(null)); assertEquals((short) 0, conversion.convert(asDateOnly(12345678L))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); assertEquals("[86400000] out of [short] range", e.getMessage()); } { Converter conversion = converterFor(TIME, to); assertNull(conversion.convert(null)); assertEquals((short) 12345, conversion.convert(asTimeOnly(12345L))); - Exception e1 = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(-123456789L))); + Exception e1 = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asTimeOnly(-123456789L))); assertEquals("[49343211] out of [short] range", e1.getMessage()); - Exception e2 = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(123456789L))); + Exception e2 = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asTimeOnly(123456789L))); assertEquals("[37056789] 
out of [short] range", e2.getMessage()); } { @@ -642,7 +642,7 @@ public void testConversionToShort() { // Nanos are ignored, only millis are used assertEquals((short) 1123, conversion.convert(asDateTimeWithNanos("1970-01-01T00:00:01.123456789Z"))); Exception e = expectThrows( - QlIllegalArgumentException.class, + InvalidArgumentException.class, () -> conversion.convert(DateUtils.asDateTimeWithMillis(Integer.MAX_VALUE)) ); assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage()); @@ -657,23 +657,23 @@ public void testConversionToByte() { assertEquals((byte) 10, conversion.convert(10.0)); assertEquals((byte) 10, conversion.convert(10.1)); assertEquals((byte) 11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Short.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Short.MAX_VALUE)); assertEquals("[" + Short.MAX_VALUE + "] out of [byte] range", e.getMessage()); } { Converter conversion = converterFor(DATE, to); assertNull(conversion.convert(null)); assertEquals((byte) 0, conversion.convert(asDateOnly(12345678L))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); assertEquals("[86400000] out of [byte] range", e.getMessage()); } { Converter conversion = converterFor(TIME, to); assertNull(conversion.convert(null)); assertEquals((byte) 123, conversion.convert(asTimeOnly(123L))); - Exception e1 = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(-123L))); + Exception e1 = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asTimeOnly(-123L))); assertEquals("[86399877] out of [byte] range", e1.getMessage()); - Exception e2 = expectThrows(QlIllegalArgumentException.class, () -> 
conversion.convert(asTimeOnly(123456789L))); + Exception e2 = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asTimeOnly(123456789L))); assertEquals("[37056789] out of [byte] range", e2.getMessage()); } { @@ -684,7 +684,7 @@ public void testConversionToByte() { // Nanos are ignored, only millis are used assertEquals((byte) 123, conversion.convert(asDateTimeWithNanos("1970-01-01T00:00:00.123456789Z"))); Exception e = expectThrows( - QlIllegalArgumentException.class, + InvalidArgumentException.class, () -> conversion.convert(DateUtils.asDateTimeWithMillis(Integer.MAX_VALUE)) ); assertEquals("[" + Integer.MAX_VALUE + "] out of [byte] range", e.getMessage()); @@ -764,7 +764,7 @@ public void testStringToIp() { Converter conversion = converterFor(KEYWORD, IP); assertNull(conversion.convert(null)); assertEquals("192.168.1.1", conversion.convert("192.168.1.1")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("10.1.1.300")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10.1.1.300")); assertEquals("[10.1.1.300] is not a valid IPv4 or IPv6 address", e.getMessage()); } diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/AbstractXPackRestTest.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/AbstractXPackRestTest.java index 1009a8460a32a..e8c3250bf2e46 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/AbstractXPackRestTest.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/AbstractXPackRestTest.java @@ -35,6 +35,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; import java.util.function.Supplier; import static java.util.Collections.emptyList; @@ -118,13 +119,17 @@ public void cleanup() throws Exception { if (isWaitForPendingTasks()) { // This waits for pending tasks to complete, so 
must go last (otherwise // it could be waiting for pending tasks while monitoring is still running). - waitForPendingTasks(adminClient(), task -> { - // Don't check rollup jobs because we clear them in the superclass. - return task.contains(RollupJob.NAME); - }); + waitForPendingTasks(adminClient(), waitForPendingTasksFilter()); } } + protected Predicate waitForPendingTasksFilter() { + return task -> { + // Don't check rollup jobs because we clear them in the superclass. + return task.contains(RollupJob.NAME); + }; + } + /** * Delete any left over machine learning datafeeds and jobs. */ diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml index 5726d75422e21..ca07b888ab5c1 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml @@ -124,3 +124,100 @@ setup: match: title: query: "title" + +--- +"Test pinned query with knn query": + - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + + - do: + indices.create: + index: my_index + body: + settings: + number_of_shards: 1 + mappings: + dynamic: false + properties: + my_vector: + type: dense_vector + dims: 4 + index : true + similarity : l2_norm + my_name: + type: keyword + store: true + aliases: + my_alias: + filter: + term: + my_name: v2 + my_alias1: + filter: + term: + my_name: v1 + + - do: + bulk: + refresh: true + index: my_index + body: + - '{"index": {"_id": "1"}}' + - '{"my_vector": [1, 1, 1, 1], "my_name": "v1"}' + - '{"index": {"_id": "2"}}' + - '{"my_vector": [1, 1, 1, 2], "my_name": "v2"}' + - '{"index": {"_id": "3"}}' + - '{"my_vector": [1, 1, 1, 3], "my_name": "v1"}' + - '{"index": {"_id": "4"}}' + - '{"my_vector": [1, 1, 1, 4], "my_name": "v2"}' + - '{"index": 
{"_id": "5"}}' + - '{"my_vector": [1, 1, 1, 5], "my_name": "v1"}' + - '{"index": {"_id": "6"}}' + - '{"my_vector": [1, 1, 1, 6], "my_name": "v2"}' + - '{"index": {"_id": "7"}}' + - '{"my_vector": [1, 1, 1, 7], "my_name": "v1"}' + - '{"index": {"_id": "8"}}' + - '{"my_vector": [1, 1, 1, 8], "my_name": "v2"}' + - '{"index": {"_id": "9"}}' + - '{"my_vector": [1, 1, 1, 9], "my_name": "v1"}' + - '{"index": {"_id": "10"}}' + - '{"my_vector": [1, 1, 1, 10], "my_name": "v2"}' + - do: + search: + index: my_index + body: + size: 10 + fields: [ my_name ] + query: + pinned: + ids: [ 8, 9, 10 ] + organic: + knn: { field: my_vector, query_vector: [ 1, 1, 1, 1 ], num_candidates: 5 } + - match: { hits.total.value: 8 } # 5 knn results + extra results from pinned query + - match: { hits.hits.0._id: "8" } + - match: { hits.hits.1._id: "9" } + - match: { hits.hits.2._id: "10" } + - match: { hits.hits.3._id: "1" } + - match: { hits.hits.4._id: "2" } + - match: { hits.hits.5._id: "3" } + - match: { hits.hits.6._id: "4" } + - match: { hits.hits.7._id: "5" } + + - do: + search: + index: my_index + body: + size: 10 + fields: [ my_name ] + query: + pinned: + ids: [ 3, 4, 5 ] + organic: + knn: { field: my_vector, query_vector: [ 1, 1, 1, 1 ], num_candidates: 5 } + - match: { hits.total.value: 5 } + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.2._id: "5" } + - match: { hits.hits.3._id: "1" } + - match: { hits.hits.4._id: "2" } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java index 929150e916d21..db1a9ed9f8c16 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java +++ 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java @@ -50,7 +50,7 @@ import static org.elasticsearch.core.Strings.format; -public class ReportingAttachmentParser implements EmailAttachmentParser { +public final class ReportingAttachmentParser implements EmailAttachmentParser { public static final String TYPE = "reporting"; @@ -137,7 +137,6 @@ public static List> getSettings() { private boolean warningEnabled = REPORT_WARNING_ENABLED_SETTING.getDefault(Settings.EMPTY); private final Map customWarnings = new ConcurrentHashMap<>(1); - @SuppressWarnings("this-escape") public ReportingAttachmentParser( Settings settings, WebhookService webhookService, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java index 5d20db35dc7af..7586543d0869e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java @@ -19,7 +19,7 @@ import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalArgument; import static org.elasticsearch.xpack.watcher.support.Strings.join; -public class DayTimes implements Times { +public final class DayTimes implements Times { public static final DayTimes NOON = new DayTimes("noon", new int[] { 12 }, new int[] { 0 }); public static final DayTimes MIDNIGHT = new DayTimes("midnight", new int[] { 0 }, new int[] { 0 }); @@ -36,12 +36,10 @@ public DayTimes(int hour, int minute) { this(new int[] { hour }, new int[] { minute }); } - @SuppressWarnings("this-escape") public DayTimes(int[] hour, int[] minute) { this(null, hour, minute); } - @SuppressWarnings("this-escape") DayTimes(String time, int[] hour, int[] minute) { this.time 
= time; this.hour = hour; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java index b01a786316e5d..37fb70a3e89f7 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java @@ -22,7 +22,7 @@ import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalArgument; import static org.elasticsearch.xpack.watcher.support.Strings.join; -public class MonthTimes implements Times { +public final class MonthTimes implements Times { public static final String LAST = "last_day"; public static final String FIRST = "first_day"; @@ -37,7 +37,6 @@ public MonthTimes() { this(DEFAULT_DAYS, DEFAULT_TIMES); } - @SuppressWarnings("this-escape") public MonthTimes(int[] days, DayTimes[] times) { this.days = days.length == 0 ? 
DEFAULT_DAYS : days; Arrays.sort(this.days); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java index a2091295820f9..55b1d494f8acb 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java @@ -24,7 +24,7 @@ import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalArgument; import static org.elasticsearch.xpack.watcher.support.Strings.join; -public class YearTimes implements Times { +public final class YearTimes implements Times { public static final EnumSet DEFAULT_MONTHS = EnumSet.of(Month.JANUARY); public static final int[] DEFAULT_DAYS = new int[] { 1 }; @@ -38,7 +38,6 @@ public YearTimes() { this(DEFAULT_MONTHS, DEFAULT_DAYS, DEFAULT_TIMES); } - @SuppressWarnings("this-escape") public YearTimes(EnumSet months, int[] days, DayTimes[] times) { this.months = months.isEmpty() ? DEFAULT_MONTHS : months; this.days = days.length == 0 ? 
DEFAULT_DAYS : days; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatchExecutionContextMockBuilder.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatchExecutionContextMockBuilder.java index 359aa2b28f660..b6c1cebfa6569 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatchExecutionContextMockBuilder.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatchExecutionContextMockBuilder.java @@ -22,12 +22,11 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class WatchExecutionContextMockBuilder { +public final class WatchExecutionContextMockBuilder { private final WatchExecutionContext ctx; private final Watch watch; - @SuppressWarnings("this-escape") public WatchExecutionContextMockBuilder(String watchId) { ctx = mock(WatchExecutionContext.class); watch = mock(Watch.class); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java index b29dfa182cb2b..32347b5d2624b 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java @@ -6,8 +6,6 @@ */ package org.elasticsearch.xpack.watcher.watch; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.search.SearchRequest; @@ -158,7 +156,6 @@ public class WatchTests extends ESTestCase { private TextTemplateEngine templateEngine; private HtmlSanitizer htmlSanitizer; private XPackLicenseState licenseState; - private Logger logger; private Settings settings = Settings.EMPTY; private WatcherSearchTemplateService 
searchTemplateService; @@ -172,7 +169,6 @@ public void init() throws Exception { templateEngine = mock(TextTemplateEngine.class); htmlSanitizer = mock(HtmlSanitizer.class); licenseState = mock(XPackLicenseState.class); - logger = LogManager.getLogger(WatchTests.class); searchTemplateService = mock(WatcherSearchTemplateService.class); } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index ec206c64a2371..480704b89ca60 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -122,8 +122,7 @@ public TokenStreamComponents createComponents(String fieldName) { } }); - public static class PunctuationFoldingFilter extends TokenFilter { - @SuppressWarnings("this-escape") + public static final class PunctuationFoldingFilter extends TokenFilter { private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); /** @@ -137,7 +136,7 @@ public PunctuationFoldingFilter(TokenStream in) { } @Override - public final boolean incrementToken() throws IOException { + public boolean incrementToken() throws IOException { if (input.incrementToken()) { normalize(termAtt.buffer(), 0, termAtt.length()); return true; @@ -587,7 +586,7 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { throw new IllegalStateException("Invalid query type found parsing regex query:" + approxQuery); } - protected void getNgramTokens(Set tokens, String fragment) { + private void getNgramTokens(Set tokens, String fragment) { if (fragment.equals(TOKEN_START_STRING) || fragment.equals(TOKEN_END_STRING)) { // If a regex is a form of match-all e.g. ".*" we only produce the token start/end markers as search // terms which can be ignored. 
diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index b48d31358eaeb..2a9d761b7c3c1 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -11,15 +11,12 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.core.ShardsAcknowledgedResponse; import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.document.DocumentField; @@ -27,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.PathUtils; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.query.QueryBuilders; @@ -43,7 +41,6 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; @@ -75,12 +72,6 @@ protected Settings restClientSettings() { return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); } 
- @SuppressWarnings("removal") - protected static RestHighLevelClient highLevelClient(RestClient client) { - return new RestHighLevelClient(client, ignore -> {}, Collections.emptyList()) { - }; - } - public void testOldRepoAccess() throws IOException { runTest(false); } @@ -113,10 +104,7 @@ public void runTest(boolean sourceOnlyRepository) throws IOException { int numDocs = 10; int extraDocs = 1; final Set expectedIds = new HashSet<>(); - try ( - RestHighLevelClient client = highLevelClient(adminClient()); - RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build() - ) { + try (RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build()) { if (afterRestart == false) { beforeRestart( sourceOnlyRepository, @@ -126,7 +114,6 @@ public void runTest(boolean sourceOnlyRepository) throws IOException { numDocs, extraDocs, expectedIds, - client, oldEs, indexName ); @@ -151,7 +138,6 @@ private void beforeRestart( int numDocs, int extraDocs, Set expectedIds, - RestHighLevelClient client, RestClient oldEs, String indexName ) throws IOException { @@ -262,35 +248,15 @@ private void beforeRestart( assertThat(getResp.evaluate("snapshots.0.stats.total.file_count"), greaterThan(0)); // restore / mount and check whether searches work - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); + restoreMountAndVerify(numDocs, expectedIds, numberOfShards, sourceOnlyRepository, oldVersion, indexName, repoName, snapshotName); // close indices - assertTrue(closeIndex(client(), "restored_" + indexName).isShardsAcknowledged()); - assertTrue(closeIndex(client(), "mounted_full_copy_" + indexName).isShardsAcknowledged()); - assertTrue(closeIndex(client(), "mounted_shared_cache_" + indexName).isShardsAcknowledged()); + closeIndex(client(), "restored_" + indexName); + closeIndex(client(), "mounted_full_copy_" + indexName); + closeIndex(client(), 
"mounted_shared_cache_" + indexName); // restore / mount again - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); + restoreMountAndVerify(numDocs, expectedIds, numberOfShards, sourceOnlyRepository, oldVersion, indexName, repoName, snapshotName); } private String getType(Version oldVersion, String id) { @@ -305,7 +271,6 @@ private static String sourceForDoc(int i) { private void restoreMountAndVerify( int numDocs, Set expectedIds, - RestHighLevelClient client, int numberOfShards, boolean sourceOnlyRepository, Version oldVersion, @@ -358,7 +323,7 @@ private void restoreMountAndVerify( } // run a search against the index - assertDocs("restored_" + indexName, numDocs, expectedIds, client, sourceOnlyRepository, oldVersion, numberOfShards); + assertDocs("restored_" + indexName, numDocs, expectedIds, sourceOnlyRepository, oldVersion, numberOfShards); // mount as full copy searchable snapshot Request mountRequest = new Request("POST", "/_snapshot/" + repoName + "/" + snapshotName + "/_mount"); @@ -378,7 +343,7 @@ private void restoreMountAndVerify( ensureGreen("mounted_full_copy_" + indexName); // run a search against the index - assertDocs("mounted_full_copy_" + indexName, numDocs, expectedIds, client, sourceOnlyRepository, oldVersion, numberOfShards); + assertDocs("mounted_full_copy_" + indexName, numDocs, expectedIds, sourceOnlyRepository, oldVersion, numberOfShards); // mount as shared cache searchable snapshot mountRequest = new Request("POST", "/_snapshot/" + repoName + "/" + snapshotName + "/_mount"); @@ -391,7 +356,7 @@ private void restoreMountAndVerify( assertEquals(numberOfShards, (int) mountResponse.evaluate("snapshot.shards.successful")); // run a search against the index - assertDocs("mounted_shared_cache_" + indexName, numDocs, expectedIds, client, sourceOnlyRepository, oldVersion, numberOfShards); + assertDocs("mounted_shared_cache_" + indexName, 
numDocs, expectedIds, sourceOnlyRepository, oldVersion, numberOfShards); } @SuppressWarnings("removal") @@ -399,7 +364,6 @@ private void assertDocs( String index, int numDocs, Set expectedIds, - RestHighLevelClient client, boolean sourceOnlyRepository, Version oldVersion, int numberOfShards @@ -410,8 +374,10 @@ private void assertDocs( .build(); RequestOptions randomRequestOptions = randomBoolean() ? RequestOptions.DEFAULT : v7RequestOptions; + SearchResponse searchResponse; + // run a search against the index - SearchResponse searchResponse = client.search(new SearchRequest(index), randomRequestOptions); + searchResponse = search(index, null, randomRequestOptions); logger.info(searchResponse); // check hit count assertEquals(numDocs, searchResponse.getHits().getTotalHits().value); @@ -429,12 +395,11 @@ private void assertDocs( String id = randomFrom(expectedIds); int num = getIdAsNumeric(id); // run a search using runtime fields against the index - searchResponse = client.search( - new SearchRequest(index).source( - SearchSourceBuilder.searchSource() - .query(QueryBuilders.matchQuery("val", num)) - .runtimeMappings(Map.of("val", Map.of("type", "long"))) - ), + searchResponse = search( + index, + SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchQuery("val", num)) + .runtimeMappings(Map.of("val", Map.of("type", "long"))), randomRequestOptions ); logger.info(searchResponse); @@ -444,24 +409,24 @@ private void assertDocs( if (sourceOnlyRepository == false) { // search using reverse sort on val - searchResponse = client.search( - new SearchRequest(index).source( - SearchSourceBuilder.searchSource() - .query(QueryBuilders.matchAllQuery()) - .sort(SortBuilders.fieldSort("val").order(SortOrder.DESC)) - ), + searchResponse = search( + index, + SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()) + .sort(SortBuilders.fieldSort("val").order(SortOrder.DESC)), randomRequestOptions ); logger.info(searchResponse); // check sort order 
assertEquals( - expectedIds.stream().sorted(Comparator.comparingInt(this::getIdAsNumeric).reversed()).collect(Collectors.toList()), - Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getId).collect(Collectors.toList()) + expectedIds.stream().sorted(Comparator.comparingInt(this::getIdAsNumeric).reversed()).toList(), + Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getId).toList() ); // look up postings - searchResponse = client.search( - new SearchRequest(index).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("test", "test" + num))), + searchResponse = search( + index, + SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("test", "test" + num)), randomRequestOptions ); logger.info(searchResponse); @@ -472,8 +437,9 @@ private void assertDocs( // search on _type and check that results contain _type information String randomType = getType(oldVersion, randomFrom(expectedIds)); long typeCount = expectedIds.stream().filter(idd -> getType(oldVersion, idd).equals(randomType)).count(); - searchResponse = client.search( - new SearchRequest(index).source(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("_type", randomType))), + searchResponse = search( + index, + SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("_type", randomType)), randomRequestOptions ); logger.info(searchResponse); @@ -493,10 +459,9 @@ private void assertDocs( ); // check that shards are skipped based on non-matching date - searchResponse = client.search( - new SearchRequest(index).source( - SearchSourceBuilder.searchSource().query(QueryBuilders.rangeQuery("create_date").from("2020-02-01")) - ), + searchResponse = search( + index, + SearchSourceBuilder.searchSource().query(QueryBuilders.rangeQuery("create_date").from("2020-02-01")), randomRequestOptions ); logger.info(searchResponse); @@ -507,13 +472,22 @@ private void assertDocs( } } + private static SearchResponse search(String index, @Nullable 
SearchSourceBuilder builder, RequestOptions options) throws IOException { + Request request = new Request("POST", "/" + index + "/_search"); + if (builder != null) { + request.setJsonEntity(builder.toString()); + } + request.setOptions(options); + return SearchResponse.fromXContent(responseAsParser(client().performRequest(request))); + } + private int getIdAsNumeric(String id) { return Integer.parseInt(id.substring("testdoc".length())); } - static ShardsAcknowledgedResponse closeIndex(RestClient client, String index) throws IOException { + private static void closeIndex(RestClient client, String index) throws IOException { Request request = new Request("POST", "/" + index + "/_close"); - Response response = client.performRequest(request); - return ShardsAcknowledgedResponse.fromXContent(responseAsParser(response)); + ObjectPath doc = ObjectPath.createFromResponse(client.performRequest(request)); + assertTrue(doc.evaluate("shards_acknowledged")); } } diff --git a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java index d013ce143a673..839d21bbd3e8a 100644 --- a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java +++ b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java @@ -28,7 +28,7 @@ * (2) It performs role mapping to determine the roles for the looked-up user * (3) It caches the looked-up User objects */ -public class CustomRoleMappingRealm extends Realm implements CachingRealm { +public final class CustomRoleMappingRealm extends Realm implements CachingRealm { public static final String TYPE = "custom_role_mapping"; @@ -38,7 +38,6 @@ public class CustomRoleMappingRealm extends Realm implements CachingRealm { private final Cache cache; private final 
UserRoleMapper roleMapper; - @SuppressWarnings("this-escape") public CustomRoleMappingRealm(RealmConfig config, UserRoleMapper roleMapper) { super(config); this.cache = CacheBuilder.builder().build();