() {
@Override
public void execute(Task task) {
if (expectedOutputFile.exists() == false) {
- Path relativeOutputPath = project.getRootDir().toPath().relativize(expectedOutputFile.toPath());
+ Path relativeOutputPath = rootDir.toPath().relativize(expectedOutputFile.toPath());
final String message = "Building %s didn't generate expected artifact [%s]. The working branch may be "
+ "out-of-date - try merging in the latest upstream changes to the branch.";
throw new InvalidUserDataException(message.formatted(bwcVersion.get(), relativeOutputPath));
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
index 16c7bf6d32862..f92789f701049 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
@@ -23,16 +23,17 @@
import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.util.GradleUtils;
import org.gradle.api.GradleException;
-import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Dependency;
+import org.gradle.api.artifacts.dsl.DependencyHandler;
import org.gradle.api.provider.Provider;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import java.util.function.Function;
-import static org.elasticsearch.gradle.util.GradleUtils.projectDependency;
-
/**
* An internal elasticsearch build plugin that registers additional
* distribution resolution strategies to the 'elasticsearch.download-distribution' plugin
@@ -64,18 +65,18 @@ public void apply(Project project) {
*
* BWC versions are resolved as project to projects under `:distribution:bwc`.
*/
- private void registerInternalDistributionResolutions(NamedDomainObjectContainer<DistributionResolution> resolutions) {
- resolutions.register("localBuild", distributionResolution -> distributionResolution.setResolver((project, distribution) -> {
+ private void registerInternalDistributionResolutions(List<DistributionResolution> resolutions) {
+ resolutions.add(new DistributionResolution("local-build", (project, distribution) -> {
if (isCurrentVersion(distribution)) {
// non-external project, so depend on local build
return new ProjectBasedDistributionDependency(
- config -> projectDependency(project, distributionProjectPath(distribution), config)
+ config -> projectDependency(project.getDependencies(), distributionProjectPath(distribution), config)
);
}
return null;
}));
- resolutions.register("bwc", distributionResolution -> distributionResolution.setResolver((project, distribution) -> {
+ resolutions.add(new DistributionResolution("bwc", (project, distribution) -> {
BwcVersions.UnreleasedVersionInfo unreleasedInfo = BuildParams.getBwcVersions()
.unreleasedInfo(Version.fromString(distribution.getVersion()));
if (unreleasedInfo != null) {
@@ -89,7 +90,7 @@ private void registerInternalDistributionResolutions(NamedDomainObjectContainer<
}
String projectConfig = getProjectConfig(distribution, unreleasedInfo);
return new ProjectBasedDistributionDependency(
- (config) -> projectDependency(project, unreleasedInfo.gradleProjectPath(), projectConfig)
+ (config) -> projectDependency(project.getDependencies(), unreleasedInfo.gradleProjectPath(), projectConfig)
);
}
return null;
@@ -116,6 +117,13 @@ private static String getProjectConfig(ElasticsearchDistribution distribution, B
}
}
+ private static Dependency projectDependency(DependencyHandler dependencyHandler, String projectPath, String projectConfig) {
+ Map<String, String> depConfig = new HashMap<>();
+ depConfig.put("path", projectPath);
+ depConfig.put("configuration", projectConfig);
+ return dependencyHandler.project(depConfig);
+ }
+
private static String distributionProjectPath(ElasticsearchDistribution distribution) {
String projectPath = ":distribution";
if (distribution.getType() == ElasticsearchDistributionTypes.INTEG_TEST_ZIP) {
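The relocated projectDependency helper above drops the eager findProject existence check that the removed GradleUtils version performed, so an invalid path now surfaces at dependency resolution time rather than at configuration time. As a rough, self-contained sketch, the map it builds is Gradle's map-based project notation, equivalent to project(path: ..., configuration: ...) in the DSL (the path and configuration values below are illustrative, not taken from this build):

    import java.util.HashMap;
    import java.util.Map;

    // Sketch only: builds the same notation the helper passes to DependencyHandler.project(Map).
    public class ProjectNotationSketch {
        public static void main(String[] args) {
            Map<String, String> depConfig = new HashMap<>();
            depConfig.put("path", ":distribution:bwc:minor"); // hypothetical project path
            depConfig.put("configuration", "checkout");       // hypothetical configuration name
            System.out.println(depConfig);                    // two entries; HashMap order is unspecified
        }
    }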
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/LegacyRestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/LegacyRestTestBasePlugin.java
index eacc5da6220ab..cf68d2928a793 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/LegacyRestTestBasePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/LegacyRestTestBasePlugin.java
@@ -22,12 +22,18 @@
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
+import org.gradle.api.Task;
import org.gradle.api.plugins.JavaBasePlugin;
import org.gradle.api.provider.Provider;
import org.gradle.api.provider.ProviderFactory;
+import org.gradle.api.specs.NotSpec;
+import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.Sync;
+import org.gradle.api.tasks.TaskContainer;
import org.gradle.api.tasks.bundling.Zip;
+import java.util.Collections;
+
import javax.inject.Inject;
import static org.elasticsearch.gradle.internal.RestrictedBuildApiService.BUILD_API_RESTRICTIONS_SYS_PROPERTY;
@@ -47,6 +53,7 @@ public class LegacyRestTestBasePlugin implements Plugin<Project> {
private static final String TESTS_CLUSTER_REMOTE_ACCESS = "tests.cluster.remote_access";
private ProviderFactory providerFactory;
+ private Project project;
@Inject
public LegacyRestTestBasePlugin(ProviderFactory providerFactory) {
@@ -55,6 +62,7 @@ public LegacyRestTestBasePlugin(ProviderFactory providerFactory) {
@Override
public void apply(Project project) {
+ this.project = project;
Provider<RestrictedBuildApiService> serviceProvider = project.getGradle()
.getSharedServices()
.registerIfAbsent("restrictedBuildAPI", RestrictedBuildApiService.class, spec -> {
@@ -97,6 +105,7 @@ public void apply(Project project) {
);
}
}
+ configureCacheability(restIntegTestTask);
});
project.getTasks()
@@ -121,6 +130,26 @@ public void apply(Project project) {
});
}
+ private void configureCacheability(RestIntegTestTask restIntegTestTask) {
+ TaskContainer tasks = project.getTasks();
+ Spec<Task> taskSpec = t -> tasks.withType(StandaloneRestIntegTestTask.class)
+ .stream()
+ .filter(task -> task != restIntegTestTask)
+ .anyMatch(task -> Collections.disjoint(task.getClusters(), restIntegTestTask.getClusters()) == false);
+ restIntegTestTask.getOutputs()
+ .doNotCacheIf(
+ "Caching disabled for this task since it uses a cluster shared by other tasks",
+ /*
+ * Look for any other tasks which use the same cluster as this task. Since tests often have side effects for the cluster
+ * they execute against, this state can cause issues when trying to cache tests results of tasks that share a cluster. To
+ * avoid any undesired behavior we simply disable the cache if we detect that this task uses a cluster shared between
+ * multiple tasks.
+ */
+ taskSpec
+ );
+ restIntegTestTask.getOutputs().upToDateWhen(new NotSpec(taskSpec));
+ }
+
private String systemProperty(String propName) {
return providerFactory.systemProperty(propName).getOrNull();
}
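The cacheability logic moved into the plugin above boils down to a pairwise overlap test: a REST test task must not be cached (or treated as up to date) when its cluster set intersects any other StandaloneRestIntegTestTask's clusters, because tests mutate shared cluster state. A minimal sketch of that check outside Gradle (task and cluster names are invented):

    import java.util.Collections;
    import java.util.List;
    import java.util.Set;

    // Sketch only: Collections.disjoint(a, b) == false means the two tasks share a cluster.
    public class SharedClusterCheckSketch {
        record TaskStub(String name, Set<String> clusters) {} // stand-in for StandaloneRestIntegTestTask

        static boolean sharesCluster(TaskStub self, List<TaskStub> allTasks) {
            return allTasks.stream()
                .filter(task -> task != self)
                .anyMatch(task -> Collections.disjoint(task.clusters(), self.clusters()) == false);
        }

        public static void main(String[] args) {
            TaskStub a = new TaskStub("integTest", Set.of("mixed-cluster"));
            TaskStub b = new TaskStub("upgradeTest", Set.of("mixed-cluster"));
            TaskStub c = new TaskStub("yamlTest", Set.of("solo-cluster"));
            List<TaskStub> tasks = List.of(a, b, c);
            System.out.println(sharesCluster(a, tasks)); // true  -> doNotCacheIf applies
            System.out.println(sharesCluster(c, tasks)); // false -> task stays cacheable
        }
    }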
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java
index c602a50c2adb8..32e7f10d14355 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java
@@ -38,6 +38,8 @@
import org.gradle.api.artifacts.ProjectDependency;
import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
import org.gradle.api.attributes.Attribute;
+import org.gradle.api.file.ConfigurableFileCollection;
+import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileTree;
import org.gradle.api.provider.ProviderFactory;
import org.gradle.api.tasks.ClasspathNormalizer;
@@ -134,16 +136,20 @@ public void apply(Project project) {
task.systemProperty("tests.system_call_filter", "false");
// Register plugins and modules as task inputs and pass paths as system properties to tests
- nonInputSystemProperties.systemProperty(TESTS_CLUSTER_MODULES_PATH_SYSPROP, modulesConfiguration::getAsPath);
- registerConfigurationInputs(task, modulesConfiguration);
- nonInputSystemProperties.systemProperty(TESTS_CLUSTER_PLUGINS_PATH_SYSPROP, pluginsConfiguration::getAsPath);
- registerConfigurationInputs(task, extractedPluginsConfiguration);
+ var modulePath = project.getObjects().fileCollection().from(modulesConfiguration);
+ nonInputSystemProperties.systemProperty(TESTS_CLUSTER_MODULES_PATH_SYSPROP, modulePath::getAsPath);
+ registerConfigurationInputs(task, modulesConfiguration.getName(), modulePath);
+ var pluginPath = project.getObjects().fileCollection().from(pluginsConfiguration);
+ nonInputSystemProperties.systemProperty(TESTS_CLUSTER_PLUGINS_PATH_SYSPROP, pluginPath::getAsPath);
+ registerConfigurationInputs(
+ task,
+ extractedPluginsConfiguration.getName(),
+ project.getObjects().fileCollection().from(extractedPluginsConfiguration)
+ );
// Wire up integ-test distribution by default for all test tasks
- nonInputSystemProperties.systemProperty(
- INTEG_TEST_DISTRIBUTION_SYSPROP,
- () -> integTestDistro.getExtracted().getSingleFile().getPath()
- );
+ FileCollection extracted = integTestDistro.getExtracted();
+ nonInputSystemProperties.systemProperty(INTEG_TEST_DISTRIBUTION_SYSPROP, () -> extracted.getSingleFile().getPath());
nonInputSystemProperties.systemProperty(TESTS_RUNTIME_JAVA_SYSPROP, BuildParams.getRuntimeJavaHome());
// Add `usesDefaultDistribution()` extension method to test tasks to indicate they require the default distro
@@ -216,15 +222,15 @@ private FileTree getDistributionFiles(ElasticsearchDistribution distribution, Ac
return distribution.getExtracted().getAsFileTree().matching(patternFilter);
}
- private void registerConfigurationInputs(Task task, Configuration configuration) {
+ private void registerConfigurationInputs(Task task, String configurationName, ConfigurableFileCollection configuration) {
task.getInputs()
.files(providerFactory.provider(() -> configuration.getAsFileTree().filter(f -> f.getName().endsWith(".jar") == false)))
- .withPropertyName(configuration.getName() + "-files")
+ .withPropertyName(configurationName + "-files")
.withPathSensitivity(PathSensitivity.RELATIVE);
task.getInputs()
.files(providerFactory.provider(() -> configuration.getAsFileTree().filter(f -> f.getName().endsWith(".jar"))))
- .withPropertyName(configuration.getName() + "-classpath")
+ .withPropertyName(configurationName + "-classpath")
.withNormalizer(ClasspathNormalizer.class);
}
diff --git a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy
index 6b662b8165034..719fae2b463c0 100644
--- a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy
+++ b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy
@@ -34,7 +34,7 @@ class TestClustersPluginFuncTest extends AbstractGradleFuncTest {
id 'elasticsearch.testclusters'
}
- class SomeClusterAwareTask extends DefaultTask implements TestClustersAware {
+ abstract class SomeClusterAwareTask extends DefaultTask implements TestClustersAware {
private Collection<ElasticsearchCluster> clusters = new HashSet<>();
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java
index d08dc469e5ba5..e12523870b15b 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java
@@ -11,6 +11,7 @@
import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes;
import org.elasticsearch.gradle.transform.SymbolicLinkPreservingUntarTransform;
import org.elasticsearch.gradle.transform.UnzipTransform;
+import org.gradle.api.Action;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
@@ -22,7 +23,8 @@
import org.gradle.api.provider.Property;
import org.gradle.api.provider.Provider;
-import java.util.Comparator;
+import java.util.ArrayList;
+import java.util.List;
import javax.inject.Inject;
@@ -42,9 +44,10 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
private static final String DOWNLOAD_REPO_NAME = "elasticsearch-downloads";
private static final String SNAPSHOT_REPO_NAME = "elasticsearch-snapshots";
public static final String DISTRO_EXTRACTED_CONFIG_PREFIX = "es_distro_extracted_";
+ public static final String DISTRO_CONFIG_PREFIX = "es_distro_file_";
private NamedDomainObjectContainer<ElasticsearchDistribution> distributionsContainer;
- private NamedDomainObjectContainer<DistributionResolution> distributionsResolutionStrategiesContainer;
+ private List<DistributionResolution> distributionsResolutionStrategies;
private Property<Boolean> dockerAvailability;
@@ -77,7 +80,7 @@ public void apply(Project project) {
private void setupDistributionContainer(Project project, Property<Boolean> dockerAvailable) {
distributionsContainer = project.container(ElasticsearchDistribution.class, name -> {
- Configuration fileConfiguration = project.getConfigurations().create("es_distro_file_" + name);
+ Configuration fileConfiguration = project.getConfigurations().create(DISTRO_CONFIG_PREFIX + name);
Configuration extractedConfiguration = project.getConfigurations().create(DISTRO_EXTRACTED_CONFIG_PREFIX + name);
extractedConfiguration.getAttributes()
.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE);
@@ -85,21 +88,17 @@ private void setupDistributionContainer(Project project, Property docke
name,
project.getObjects(),
dockerAvailability,
- fileConfiguration,
- extractedConfiguration,
- (dist) -> finalizeDistributionDependencies(project, dist)
+ project.getObjects().fileCollection().from(fileConfiguration),
+ project.getObjects().fileCollection().from(extractedConfiguration),
+ new FinalizeDistributionAction(distributionsResolutionStrategies, project)
);
});
project.getExtensions().add(CONTAINER_NAME, distributionsContainer);
}
private void setupResolutionsContainer(Project project) {
- distributionsResolutionStrategiesContainer = project.container(DistributionResolution.class);
- // We want this ordered in the same resolution strategies are added
- distributionsResolutionStrategiesContainer.whenObjectAdded(
- resolveDependencyNotation -> resolveDependencyNotation.setPriority(distributionsResolutionStrategiesContainer.size())
- );
- project.getExtensions().add(RESOLUTION_CONTAINER_NAME, distributionsResolutionStrategiesContainer);
+ distributionsResolutionStrategies = new ArrayList<>();
+ project.getExtensions().add(RESOLUTION_CONTAINER_NAME, distributionsResolutionStrategies);
}
@SuppressWarnings("unchecked")
@@ -108,30 +107,8 @@ public static NamedDomainObjectContainer<ElasticsearchDistribution> getContainer
}
@SuppressWarnings("unchecked")
- public static NamedDomainObjectContainer<DistributionResolution> getRegistrationsContainer(Project project) {
- return (NamedDomainObjectContainer<DistributionResolution>) project.getExtensions().getByName(RESOLUTION_CONTAINER_NAME);
- }
-
- private void finalizeDistributionDependencies(Project project, ElasticsearchDistribution distribution) {
- DependencyHandler dependencies = project.getDependencies();
- // for the distribution as a file, just depend on the artifact directly
- DistributionDependency distributionDependency = resolveDependencyNotation(project, distribution);
- dependencies.add(distribution.configuration.getName(), distributionDependency.getDefaultNotation());
- // no extraction needed for rpm, deb or docker
- if (distribution.getType().shouldExtract()) {
- // The extracted configuration depends on the artifact directly but has
- // an artifact transform registered to resolve it as an unpacked folder.
- dependencies.add(distribution.getExtracted().getName(), distributionDependency.getExtractedNotation());
- }
- }
-
- private DistributionDependency resolveDependencyNotation(Project p, ElasticsearchDistribution distribution) {
- return distributionsResolutionStrategiesContainer.stream()
- .sorted(Comparator.comparingInt(DistributionResolution::getPriority))
- .map(r -> r.getResolver().resolve(p, distribution))
- .filter(d -> d != null)
- .findFirst()
- .orElseGet(() -> DistributionDependency.of(dependencyNotation(distribution)));
+ public static List<DistributionResolution> getRegistrationsContainer(Project project) {
+ return (List<DistributionResolution>) project.getExtensions().getByName(RESOLUTION_CONTAINER_NAME);
}
private static void addIvyRepo(Project project, String name, String url, String group) {
@@ -155,22 +132,53 @@ private static void setupDownloadServiceRepo(Project project) {
addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://snapshots-no-kpi.elastic.co", FAKE_SNAPSHOT_IVY_GROUP);
}
- /**
- * Returns a dependency object representing the given distribution.
- *
- * The returned object is suitable to be passed to {@link DependencyHandler}.
- * The concrete type of the object will be a set of maven coordinates as a {@link String}.
- * Maven coordinates point to either the integ-test-zip coordinates on maven central, or a set of artificial
- * coordinates that resolve to the Elastic download service through an ivy repository.
- */
- private String dependencyNotation(ElasticsearchDistribution distribution) {
- if (distribution.getType() == ElasticsearchDistributionTypes.INTEG_TEST_ZIP) {
- return "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + distribution.getVersion() + "@zip";
+ private record FinalizeDistributionAction(List<DistributionResolution> resolutionList, Project project)
+ implements
+ Action<ElasticsearchDistribution> {
+ @Override
+ public void execute(ElasticsearchDistribution distro) {
+ finalizeDistributionDependencies(project, distro);
+ }
+
+ private void finalizeDistributionDependencies(Project project, ElasticsearchDistribution distribution) {
+ // for the distribution as a file, just depend on the artifact directly
+ DistributionDependency distributionDependency = resolveDependencyNotation(project, distribution);
+ project.getDependencies().add(DISTRO_CONFIG_PREFIX + distribution.getName(), distributionDependency.getDefaultNotation());
+ // no extraction needed for rpm, deb or docker
+ if (distribution.getType().shouldExtract()) {
+ // The extracted configuration depends on the artifact directly but has
+ // an artifact transform registered to resolve it as an unpacked folder.
+ project.getDependencies()
+ .add(DISTRO_EXTRACTED_CONFIG_PREFIX + distribution.getName(), distributionDependency.getExtractedNotation());
+ }
+ }
+
+ private DistributionDependency resolveDependencyNotation(Project project, ElasticsearchDistribution distro) {
+ return resolutionList.stream()
+ .map(r -> r.getResolver().resolve(project, distro))
+ .filter(d -> d != null)
+ .findFirst()
+ .orElseGet(() -> DistributionDependency.of(dependencyNotation(distro)));
+ }
+
+ /**
+ * Returns a dependency object representing the given distribution.
+ *
+ * The returned object is suitable to be passed to {@link DependencyHandler}.
+ * The concrete type of the object will be a set of maven coordinates as a {@link String}.
+ * Maven coordinates point to either the integ-test-zip coordinates on maven central, or a set of artificial
+ * coordinates that resolve to the Elastic download service through an ivy repository.
+ */
+ private String dependencyNotation(ElasticsearchDistribution distribution) {
+ if (distribution.getType() == ElasticsearchDistributionTypes.INTEG_TEST_ZIP) {
+ return "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + distribution.getVersion() + "@zip";
+ }
+ Version distroVersion = Version.fromString(distribution.getVersion());
+ String extension = distribution.getType().getExtension(distribution.getPlatform());
+ String classifier = distribution.getType().getClassifier(distribution.getPlatform(), distroVersion);
+ String group = distribution.getVersion().endsWith("-SNAPSHOT") ? FAKE_SNAPSHOT_IVY_GROUP : FAKE_IVY_GROUP;
+ return group + ":elasticsearch" + ":" + distribution.getVersion() + classifier + "@" + extension;
}
- Version distroVersion = Version.fromString(distribution.getVersion());
- String extension = distribution.getType().getExtension(distribution.getPlatform());
- String classifier = distribution.getType().getClassifier(distribution.getPlatform(), distroVersion);
- String group = distribution.getVersion().endsWith("-SNAPSHOT") ? FAKE_SNAPSHOT_IVY_GROUP : FAKE_IVY_GROUP;
- return group + ":elasticsearch" + ":" + distribution.getVersion() + classifier + "@" + extension;
}
}
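With the NamedDomainObjectContainer and its priority bookkeeping gone, resolution order is now simply list order: resolveDependencyNotation walks the registered strategies and the first non-null result wins, falling back to dependencyNotation. A self-contained sketch of that first-match chain (versions and notations are illustrative):

    import java.util.List;
    import java.util.function.Function;

    // Sketch only: resolvers are consulted in registration order; first non-null result wins.
    public class ResolverChainSketch {
        public static void main(String[] args) {
            List<Function<String, String>> resolvers = List.of(
                v -> v.equals("8.12.0") ? "project :distribution (local-build)" : null,
                v -> v.endsWith("-SNAPSHOT") ? "project :distribution:bwc (bwc)" : null
            );
            String version = "8.11.0-SNAPSHOT";
            String notation = resolvers.stream()
                .map(r -> r.apply(version))
                .filter(d -> d != null)
                .findFirst()
                .orElseGet(() -> "org.elasticsearch:elasticsearch:" + version); // default notation fallback
            System.out.println(notation); // project :distribution:bwc (bwc)
        }
    }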
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionResolution.java b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionResolution.java
index 3b82c9f6975a0..0d8177dea5cb6 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionResolution.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionResolution.java
@@ -12,9 +12,14 @@
public class DistributionResolution {
private Resolver resolver;
- private String name;
+ private final String name;
private int priority;
+ public DistributionResolution(String name, Resolver resolver) {
+ this(name);
+ this.resolver = resolver;
+ }
+
public DistributionResolution(String name) {
this.name = name;
}
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java b/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
index 5350b6698cb30..f9805680ce8d4 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
@@ -11,7 +11,8 @@
import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes;
import org.gradle.api.Action;
import org.gradle.api.Buildable;
-import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.file.ConfigurableFileCollection;
+import org.gradle.api.file.FileCollection;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.TaskDependency;
@@ -44,7 +45,7 @@ public String toString() {
private final String name;
private final Property<Boolean> dockerAvailability;
// pkg private so plugin can configure
- final Configuration configuration;
+ final FileCollection configuration;
private final Property<Architecture> architecture;
private final Property<String> version;
@@ -52,7 +53,7 @@ public String toString() {
private final Property<Platform> platform;
private final Property<Boolean> bundledJdk;
private final Property<Boolean> failIfUnavailable;
- private final Configuration extracted;
+ private final ConfigurableFileCollection extracted;
private Action<ElasticsearchDistribution> distributionFinalizer;
private boolean frozen = false;
@@ -60,8 +61,8 @@ public String toString() {
String name,
ObjectFactory objectFactory,
Property<Boolean> dockerAvailability,
- Configuration fileConfiguration,
- Configuration extractedConfiguration,
+ ConfigurableFileCollection fileConfiguration,
+ ConfigurableFileCollection extractedConfiguration,
Action<ElasticsearchDistribution> distributionFinalizer
) {
this.name = name;
@@ -172,7 +173,7 @@ public String getFilepath() {
return configuration.getSingleFile().toString();
}
- public Configuration getExtracted() {
+ public ConfigurableFileCollection getExtracted() {
if (getType().shouldExtract() == false) {
throw new UnsupportedOperationException(
"distribution type [" + getType().getName() + "] for " + "elasticsearch distribution [" + name + "] cannot be extracted"
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/DefaultTestClustersTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/DefaultTestClustersTask.java
index 5c98ab3bf4364..e80d2ed64cabd 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/DefaultTestClustersTask.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/DefaultTestClustersTask.java
@@ -12,7 +12,7 @@
import java.util.Collection;
import java.util.HashSet;
-public class DefaultTestClustersTask extends DefaultTask implements TestClustersAware {
+public abstract class DefaultTestClustersTask extends DefaultTask implements TestClustersAware {
private Collection<ElasticsearchCluster> clusters = new HashSet<>();
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java
index 2bd8219dc48e5..ba2a5a20c4fbb 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java
@@ -8,11 +8,9 @@
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.FileSystemOperationsAware;
-import org.gradle.api.Task;
+import org.gradle.api.provider.ProviderFactory;
import org.gradle.api.services.internal.BuildServiceProvider;
import org.gradle.api.services.internal.BuildServiceRegistryInternal;
-import org.gradle.api.specs.NotSpec;
-import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Nested;
@@ -28,6 +26,8 @@
import java.util.HashSet;
import java.util.List;
+import javax.inject.Inject;
+
import static org.elasticsearch.gradle.testclusters.TestClustersPlugin.THROTTLE_SERVICE_NAME;
/**
@@ -42,23 +42,6 @@ public abstract class StandaloneRestIntegTestTask extends Test implements TestCl
private boolean debugServer = false;
public StandaloneRestIntegTestTask() {
- Spec<Task> taskSpec = t -> getProject().getTasks()
- .withType(StandaloneRestIntegTestTask.class)
- .stream()
- .filter(task -> task != this)
- .anyMatch(task -> Collections.disjoint(task.getClusters(), getClusters()) == false);
- this.getOutputs()
- .doNotCacheIf(
- "Caching disabled for this task since it uses a cluster shared by other tasks",
- /*
- * Look for any other tasks which use the same cluster as this task. Since tests often have side effects for the cluster
- * they execute against, this state can cause issues when trying to cache tests results of tasks that share a cluster. To
- * avoid any undesired behavior we simply disable the cache if we detect that this task uses a cluster shared between
- * multiple tasks.
- */
- taskSpec
- );
- this.getOutputs().upToDateWhen(new NotSpec(taskSpec));
this.getOutputs()
.doNotCacheIf(
"Caching disabled for this task since it is configured to preserve data directory",
@@ -79,6 +62,10 @@ public Collection<ElasticsearchCluster> getClusters() {
return clusters;
}
+ @Override
+ @Inject
+ public abstract ProviderFactory getProviderFactory();
+
@Override
@Internal
public List<ResourceLock> getSharedResources() {
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
index 9537162b5d109..09066d4b26e88 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
@@ -9,17 +9,24 @@
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.provider.Property;
import org.gradle.api.provider.Provider;
+import org.gradle.api.services.ServiceReference;
import org.gradle.api.tasks.Nested;
import java.util.Collection;
import java.util.concurrent.Callable;
+import static org.elasticsearch.gradle.testclusters.TestClustersPlugin.REGISTRY_SERVICE_NAME;
+
public interface TestClustersAware extends Task {
@Nested
Collection<ElasticsearchCluster> getClusters();
+ @ServiceReference(REGISTRY_SERVICE_NAME)
+ Property<TestClustersRegistry> getRegistry();
+
default void useCluster(ElasticsearchCluster cluster) {
if (cluster.getPath().equals(getProject().getPath()) == false) {
throw new TestClustersException("Task " + getPath() + " can't use test cluster from" + " another project " + cluster);
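The new @ServiceReference property has Gradle inject the shared testClustersRegistry build service lazily, so tasks no longer need to reach through the Project at execution time. As a rough analogy in plain Java, with Supplier standing in for Property<TestClustersRegistry> (all names here are stand-ins, not the Gradle API):

    import java.util.List;
    import java.util.function.Supplier;

    // Sketch only: the registry is resolved when the task runs, not when it is configured.
    public class LazyServiceSketch {
        static final class Registry {
            void maybeStartCluster(String cluster) { System.out.println("starting " + cluster); }
        }

        record AwareTask(Supplier<Registry> registry) {
            void start(List<String> clusters) {
                clusters.forEach(c -> registry.get().maybeStartCluster(c)); // lazy lookup at run time
            }
        }

        public static void main(String[] args) {
            AwareTask task = new AwareTask(Registry::new);
            task.start(List.of("test-cluster"));
        }
    }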
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
index 72a462c3cd8c9..d2ccda1c1f8c7 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
@@ -37,6 +37,7 @@
import java.io.File;
import java.util.HashMap;
import java.util.Map;
+import java.util.Set;
import java.util.function.Function;
import javax.inject.Inject;
@@ -49,7 +50,7 @@ public class TestClustersPlugin implements Plugin<Project> {
public static final String THROTTLE_SERVICE_NAME = "testClustersThrottle";
private static final String LIST_TASK_NAME = "listTestClusters";
- private static final String REGISTRY_SERVICE_NAME = "testClustersRegistry";
+ public static final String REGISTRY_SERVICE_NAME = "testClustersRegistry";
private static final Logger logger = Logging.getLogger(TestClustersPlugin.class);
private final ProviderFactory providerFactory;
private Provider<File> runtimeJavaProvider;
@@ -222,13 +223,21 @@ private void configureStartClustersHook(
testClusterTasksService.get().register(awareTask.getPath(), awareTask);
awareTask.doFirst(task -> {
awareTask.beforeStart();
- awareTask.getClusters().forEach(registry::maybeStartCluster);
+ awareTask.getClusters().forEach(awareTask.getRegistry().get()::maybeStartCluster);
});
});
});
}
}
+ public static void maybeStartCluster(ElasticsearchCluster cluster, Set<ElasticsearchCluster> runningClusters) {
+ if (runningClusters.contains(cluster)) {
+ return;
+ }
+ runningClusters.add(cluster);
+ cluster.start();
+ }
+
static public abstract class TaskEventsService implements BuildService<BuildServiceParameters.None>, OperationCompletionListener {
Map<String, TestClustersAware> tasksMap = new HashMap<>();
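The extracted maybeStartCluster helper is a start-once guard: a cluster shared by several tasks is started on first use and skipped on every later request. Self-contained sketch (the cluster type is a stand-in):

    import java.util.HashSet;
    import java.util.Set;

    // Sketch only: start a cluster at most once, tracking started clusters in a set.
    public class StartOnceSketch {
        record Cluster(String name) {
            void start() { System.out.println("starting " + name); }
        }

        static void maybeStartCluster(Cluster cluster, Set<Cluster> runningClusters) {
            if (runningClusters.contains(cluster)) {
                return; // already running, nothing to do
            }
            runningClusters.add(cluster);
            cluster.start();
        }

        public static void main(String[] args) {
            Set<Cluster> running = new HashSet<>();
            Cluster shared = new Cluster("shared");
            maybeStartCluster(shared, running); // prints "starting shared"
            maybeStartCluster(shared, running); // no-op
        }
    }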
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java b/build-tools/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java
index ce69c4ec476f9..00e5834b0f826 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java
@@ -13,7 +13,6 @@
import org.gradle.api.Task;
import org.gradle.api.UnknownTaskException;
import org.gradle.api.artifacts.Configuration;
-import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.ModuleDependency;
import org.gradle.api.artifacts.ProjectDependency;
import org.gradle.api.plugins.JavaBasePlugin;
@@ -34,7 +33,6 @@
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
@@ -183,16 +181,6 @@ public static void extendSourceSet(Project project, String parentSourceSetName,
}
}
- public static Dependency projectDependency(Project project, String projectPath, String projectConfig) {
- if (project.findProject(projectPath) == null) {
- throw new GradleException("no project [" + projectPath + "], project names: " + project.getRootProject().getAllprojects());
- }
- Map<String, String> depConfig = new HashMap<>();
- depConfig.put("path", projectPath);
- depConfig.put("configuration", projectConfig);
- return project.getDependencies().project(depConfig);
- }
-
/**
* To calculate the project path from a task path without relying on Task#getProject() which is discouraged during
* task execution time.
diff --git a/build.gradle b/build.gradle
index d05c2bf53f660..0f11854dc57bf 100644
--- a/build.gradle
+++ b/build.gradle
@@ -29,8 +29,8 @@ plugins {
id 'lifecycle-base'
id 'elasticsearch.docker-support'
id 'elasticsearch.global-build-info'
- id 'elasticsearch.build-scan'
id 'elasticsearch.build-complete'
+ id 'elasticsearch.build-scan'
id 'elasticsearch.jdk-download'
id 'elasticsearch.internal-distribution-download'
id 'elasticsearch.runtime-jdk-provision'
diff --git a/distribution/tools/java-version-checker/build.gradle b/distribution/tools/java-version-checker/build.gradle
index 39f9bbf536dda..0a47d0652e465 100644
--- a/distribution/tools/java-version-checker/build.gradle
+++ b/distribution/tools/java-version-checker/build.gradle
@@ -8,15 +8,17 @@ tasks.named(sourceSets.unsupportedJdkVersionEntrypoint.compileJavaTaskName).conf
targetCompatibility = JavaVersion.VERSION_1_8
}
+
tasks.named("jar") {
manifest {
attributes("Multi-Release": "true")
}
+ FileCollection mainOutput = sourceSets.main.output;
from(sourceSets.unsupportedJdkVersionEntrypoint.output)
eachFile { details ->
if (details.path.equals("org/elasticsearch/tools/java_version_checker/JavaVersionChecker.class") &&
- sourceSets.main.output.asFileTree.contains(details.file)) {
+ mainOutput.asFileTree.contains(details.file)) {
details.relativePath = details.relativePath.prepend("META-INF/versions/17")
}
}
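Hoisting sourceSets.main.output into the local mainOutput means the eachFile callback captures a plain FileCollection instead of the sourceSets container, a common pattern for keeping configuration-time objects out of execution-time closures. The general shape, sketched in plain Java (names are illustrative):

    import java.util.List;
    import java.util.function.Predicate;

    // Sketch only: capture the needed value eagerly in a local so the deferred
    // callback holds the value itself, not the enclosing build-script context.
    public class CaptureEarlySketch {
        public static void main(String[] args) {
            List<String> mainOutput = List.of("JavaVersionChecker.class"); // captured up front
            Predicate<String> fromMain = mainOutput::contains;             // closure references only the local
            System.out.println(fromMain.test("JavaVersionChecker.class")); // true
        }
    }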
diff --git a/docs/changelog/101333.yaml b/docs/changelog/101333.yaml
new file mode 100644
index 0000000000000..4452687b995d3
--- /dev/null
+++ b/docs/changelog/101333.yaml
@@ -0,0 +1,29 @@
+pr: 101333
+summary: Fixed JWT principal from claims
+area: Authorization
+type: breaking
+issues: []
+breaking:
+ title: Fixed JWT principal from claims
+ area: Authorization
+ details: "This changes the format of a JWT's principal before the JWT is actually\
+ \ validated by any JWT realm. The JWT's principal is a convenient way to refer\
+ \ to a JWT that has not yet been verified by a JWT realm. The JWT's principal\
+ \ is printed in the audit and regular logs (notably for auditing authn failures)\
+ \ as well as the smart realm chain reordering optimization. The JWT principal\
+ \ is NOT required to be identical to the JWT-authenticated user's principal, but\
+ \ in general, they should be similar. Previously, the JWT's principal was built\
+ \ by individual realms in the same way the realms built the authenticated user's\
+ \ principal. This had the advantage that, in simpler JWT realms configurations\
+ \ (e.g. a single JWT realm in the chain), the JWT principal and the authenticated\
+ \ user's principal are very similar. However the drawback is that, in general,\
+ \ the JWT principal and the user principal can be very different (i.e. in the\
+ \ case where one JWT realm builds the JWT principal and a different one builds\
+ \ the user principal). Another downside is that the (unauthenticated) JWT principal\
+ \ depended on realm ordering, which makes identifying the JWT from its principal\
+ \ dependent on the ES authn realm configuration. This PR implements a consistent\
+ \ fixed logic to build the JWT principal, which only depends on the JWT's claims\
+ \ and no ES configuration."
+ impact: "Users will observe changed format and values for the `user.name` attribute\
+ \ of `authentication_failed` audit log events, in the JWT (failed) authn case."
+ notable: false
diff --git a/docs/changelog/101476.yaml b/docs/changelog/101476.yaml
deleted file mode 100644
index ee4cd9b1e4b1a..0000000000000
--- a/docs/changelog/101476.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 101476
-summary: Mark legacy stack templates as deprecated
-area: Indices APIs
-type: enhancement
-issues: []
diff --git a/docs/changelog/101577.yaml b/docs/changelog/101577.yaml
new file mode 100644
index 0000000000000..e485fd3811cb6
--- /dev/null
+++ b/docs/changelog/101577.yaml
@@ -0,0 +1,5 @@
+pr: 101577
+summary: Add metrics to the shared blob cache
+area: Search
+type: enhancement
+issues: []
diff --git a/docs/changelog/101682.yaml b/docs/changelog/101682.yaml
new file mode 100644
index 0000000000000..e512006057581
--- /dev/null
+++ b/docs/changelog/101682.yaml
@@ -0,0 +1,5 @@
+pr: 101682
+summary: "Add manage_enrich cluster privilege to kibana_system role"
+area: Authentication
+type: enhancement
+issues: []
diff --git a/docs/changelog/101892.yaml b/docs/changelog/101892.yaml
new file mode 100644
index 0000000000000..175871de83d1a
--- /dev/null
+++ b/docs/changelog/101892.yaml
@@ -0,0 +1,6 @@
+pr: 101892
+summary: Dry up `AsyncTaskIndexService` memory management and fix inefficient circuit
+ breaker use
+area: Search
+type: bug
+issues: []
diff --git a/docs/changelog/101907.yaml b/docs/changelog/101907.yaml
new file mode 100644
index 0000000000000..022c061555be1
--- /dev/null
+++ b/docs/changelog/101907.yaml
@@ -0,0 +1,6 @@
+pr: 101907
+summary: Fail listener on exception in `TcpTransport#openConnection`
+area: Network
+type: bug
+issues:
+ - 100510
diff --git a/docs/changelog/101967.yaml b/docs/changelog/101967.yaml
new file mode 100644
index 0000000000000..84f188db1e30b
--- /dev/null
+++ b/docs/changelog/101967.yaml
@@ -0,0 +1,5 @@
+pr: 101967
+summary: "Fix incorrect dynamic mapping for non-numeric-value arrays #101965"
+area: Mapping
+type: bug
+issues: []
diff --git a/docs/changelog/101971.yaml b/docs/changelog/101971.yaml
new file mode 100644
index 0000000000000..23fb5463bae79
--- /dev/null
+++ b/docs/changelog/101971.yaml
@@ -0,0 +1,5 @@
+pr: 101971
+summary: Fix inference timeout from the Inference Ingest Processor
+area: Machine Learning
+type: bug
+issues: []
diff --git a/docs/changelog/101989.yaml b/docs/changelog/101989.yaml
new file mode 100644
index 0000000000000..d294d194bd4e8
--- /dev/null
+++ b/docs/changelog/101989.yaml
@@ -0,0 +1,5 @@
+pr: 101989
+summary: Add message field to `HealthPeriodicLogger` and `S3RequestRetryStats`
+area: Health
+type: enhancement
+issues: []
diff --git a/docs/changelog/101998.yaml b/docs/changelog/101998.yaml
new file mode 100644
index 0000000000000..be0e2d8c61ba3
--- /dev/null
+++ b/docs/changelog/101998.yaml
@@ -0,0 +1,5 @@
+pr: 101998
+summary: Avoid negative `DesiredBalanceStats#lastConvergedIndex`
+area: Allocation
+type: bug
+issues: []
diff --git a/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc
index b5f1315531916..44a00b9f5b99e 100644
--- a/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc
@@ -68,7 +68,7 @@ POST /_search
--------------------------------------------------
// TEST[setup:sales]
-<1> A `date_histogram` named "my_date_histo" is constructed on the "timestamp" field, with one-day intervals
+<1> A `date_histogram` named "my_date_histo" is constructed on the "timestamp" field, with one-month intervals
<2> A `sum` metric is used to calculate the sum of a field. This could be any numeric metric (sum, min, max, etc)
<3> Finally, we specify a `moving_fn` aggregation which uses "the_sum" metric as its input.
diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc
index 96103fc135271..3abe6a6df7e01 100644
--- a/docs/reference/esql/esql-limitations.asciidoc
+++ b/docs/reference/esql/esql-limitations.asciidoc
@@ -136,6 +136,27 @@ now() - 2023-10-26
include::esql-enrich-data.asciidoc[tag=limitations]
+[discrete]
+[[esql-limitations-dissect]]
+=== Dissect limitations
+
+include::esql-process-data-with-dissect-grok.asciidoc[tag=dissect-limitations]
+
+[discrete]
+[[esql-limitations-grok]]
+=== Grok limitations
+
+include::esql-process-data-with-dissect-grok.asciidoc[tag=grok-limitations]
+
+[discrete]
+[[esql-limitations-mv]]
+=== Multivalue limitations
+
+{esql} <>, but functions
+return `null` when applied to a multivalued field, unless documented otherwise.
+Work around this limitation by converting the field to single value with one of
+the <>.
+
[discrete]
[[esql-limitations-kibana]]
=== Kibana limitations
diff --git a/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc b/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc
index a37989b2b2da8..294ce52e18856 100644
--- a/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc
+++ b/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc
@@ -120,7 +120,6 @@ include::../ingest/processors/dissect.asciidoc[tag=dissect-key-modifiers]
| `+` | Append | left | `%{+keyname} %{+keyname}` | Appends two or more fields together | <>
| `+` with `/n` | Append with order | left and right | `%{+keyname/2} %{+keyname/1}` | Appends two or more fields together in the order specified | <>
| `?` | Named skip key | left | `%{?ignoreme}` | Skips the matched value in the output. Same behavior as `%{}`| <>
-| `*` and `&` | Reference keys | left | `%{*r1} %{&r1}` | Sets the output key as value of `*` and output value of `&` | <>
|======
[[esql-dissect-modifier-skip-right-padding]]
@@ -139,9 +138,13 @@ include::../ingest/processors/dissect.asciidoc[tag=append-order-modifier]
====== Named skip key (`?`)
include::../ingest/processors/dissect.asciidoc[tag=named-skip-key]
-[[esql-reference-keys]]
-====== Reference keys (`*` and `&`)
-include::../ingest/processors/dissect.asciidoc[tag=reference-keys]
+[[esql-dissect-limitations]]
+===== Limitations
+
+// tag::dissect-limitations[]
+The `DISSECT` command does not support
+<>.
+// end::dissect-limitations[]
[[esql-process-data-with-grok]]
==== Process data with `GROK`
@@ -253,6 +256,8 @@ as the `GROK` command.
[[esql-grok-limitations]]
===== Limitations
+// tag::grok-limitations[]
The `GROK` command does not support configuring <>, or <>. The `GROK` command is not
subject to <>.
+// end::grok-limitations[]
\ No newline at end of file
diff --git a/docs/reference/esql/esql-using.asciidoc b/docs/reference/esql/esql-using.asciidoc
index dbab521ead4d1..235c7defe559b 100644
--- a/docs/reference/esql/esql-using.asciidoc
+++ b/docs/reference/esql/esql-using.asciidoc
@@ -9,8 +9,8 @@ Using {esql} in {kib} to query and aggregate your data, create visualizations,
and set up alerts.
<>::
-Using {esql} in {elastic-sec} to investigate events in Timeline and create
-detection rules.
+Using {esql} in {elastic-sec} to investigate events in Timeline, create
+detection rules, and build {esql} queries using Elastic AI Assistant.
<>::
Using the <> to list and cancel {esql} queries.
@@ -18,4 +18,4 @@ Using the <> to list and cancel {esql} queries.
include::esql-rest.asciidoc[]
include::esql-kibana.asciidoc[]
include::esql-security-solution.asciidoc[]
-include::task-management.asciidoc[]
\ No newline at end of file
+include::task-management.asciidoc[]
diff --git a/docs/reference/esql/functions/starts_with.asciidoc b/docs/reference/esql/functions/starts_with.asciidoc
index 38cee79ea63f8..f98a76ef68206 100644
--- a/docs/reference/esql/functions/starts_with.asciidoc
+++ b/docs/reference/esql/functions/starts_with.asciidoc
@@ -2,7 +2,7 @@
[[esql-starts_with]]
=== `STARTS_WITH`
[.text-center]
-image::esql/functions/signature/ends_with.svg[Embedded,opts=inline]
+image::esql/functions/signature/starts_with.svg[Embedded,opts=inline]
Returns a boolean that indicates whether a keyword string starts with another
string:
diff --git a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc
index 7da46e13a8ce4..5696a032b165c 100644
--- a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc
+++ b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc
@@ -443,121 +443,8 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
(Optional, object)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
+
-.Properties of tokenization
-[%collapsible%open]
-======
-`bert`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert]
-+
-.Properties of bert
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens]
-=======
-`roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta]
-+
-.Properties of roberta
-[%collapsible%open]
-=======
-`add_prefix_space`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`mpnet`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet]
-+
-.Properties of mpnet
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens]
-=======
-`xlm_roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta]
-+
-.Properties of xlm_roberta
-[%collapsible%open]
-=======
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`bert_ja`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja]
-+
-.Properties of bert_ja
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens]
-=======
-======
+Refer to <> to review the properties of the
+`tokenization` object.
=====
`ner`:::
@@ -582,121 +469,8 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
(Optional, object)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
+
-.Properties of tokenization
-[%collapsible%open]
-======
-`bert`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert]
-+
-.Properties of bert
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens]
-=======
-`roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta]
-+
-.Properties of roberta
-[%collapsible%open]
-=======
-`add_prefix_space`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`mpnet`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet]
-+
-.Properties of mpnet
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens]
-=======
-`xlm_roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta]
-+
-.Properties of xlm_roberta
-[%collapsible%open]
-=======
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`bert_ja`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja]
-+
-.Properties of bert_ja
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens]
-=======
-======
+Refer to <> to review the
+properties of the `tokenization` object.
=====
`pass_through`:::
@@ -714,738 +488,121 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
(Optional, object)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
+
-.Properties of tokenization
-[%collapsible%open]
-======
-`bert`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert]
-+
-.Properties of bert
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
+Refer to <> to review the properties of the
+`tokenization` object.
+=====
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens]
-=======
-`roberta`::::
+`question_answering`:::
(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-question-answering]
+
-.Properties of roberta
+.Properties of question_answering inference
[%collapsible%open]
-=======
-`add_prefix_space`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space]
-
-`max_sequence_length`::::
+=====
+`max_answer_length`::::
(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
+The maximum number of words in the answer. Defaults to `15`.
-`truncate`::::
+`results_field`::::
(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`mpnet`::::
+`tokenization`::::
(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
+
-.Properties of mpnet
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens]
-=======
-`xlm_roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta]
+Recommended to set `max_sequence_length` to `386` with a `span` of `128` and set
+`truncate` to `none`.
+
-.Properties of xlm_roberta
-[%collapsible%open]
-=======
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
+Refer to <> to review the properties of the
+`tokenization` object.
+=====
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`bert_ja`::::
+`regression`:::
(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja]
-+
-.Properties of bert_ja
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens]
-=======
-======
-=====
-
-`question_answering`:::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-question-answering]
-+
-.Properties of question_answering inference
-[%collapsible%open]
-=====
-`max_answer_length`::::
-(Optional, integer)
-The maximum amount of words in the answer. Defaults to `15`.
-
-`results_field`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
-
-`tokenization`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
-+
-Recommended to set `max_sentence_length` to `386` with `128` of `span` and set
-`truncate` to `none`.
-+
-.Properties of tokenization
-[%collapsible%open]
-======
-`bert`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert]
-+
-.Properties of bert
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens]
-=======
-`roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta]
-+
-.Properties of roberta
-[%collapsible%open]
-=======
-`add_prefix_space`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`mpnet`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet]
-+
-.Properties of mpnet
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens]
-=======
-`xlm_roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta]
-+
-.Properties of xlm_roberta
-[%collapsible%open]
-=======
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`bert_ja`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja]
-+
-.Properties of bert_ja
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens]
-=======
-======
-=====
-
-`regression`:::
-(Optional, object)
-Regression configuration for inference.
+Regression configuration for inference.
+
.Properties of regression inference
-[%collapsible%open]
-=====
-`num_top_feature_importance_values`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values]
-
-`results_field`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
-=====
-
-`text_classification`:::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-classification]
-+
-.Properties of text_classification inference
-[%collapsible%open]
-=====
-`classification_labels`::::
-(Optional, string) An array of classification labels.
-
-`num_top_classes`::::
-(Optional, integer)
-Specifies the number of top class predictions to return. Defaults to all classes (-1).
-
-`results_field`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
-
-`tokenization`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
-+
-.Properties of tokenization
-[%collapsible%open]
-======
-`bert`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert]
-+
-.Properties of bert
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens]
-=======
-`roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta]
-+
-.Properties of roberta
-[%collapsible%open]
-=======
-`add_prefix_space`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`mpnet`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet]
-+
-.Properties of mpnet
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens]
-=======
-`xlm_roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta]
-+
-.Properties of xlm_roberta
-[%collapsible%open]
-=======
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`bert_ja`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja]
-+
-.Properties of bert_ja
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens]
-=======
-======
-=====
-`text_embedding`:::
-(Object, optional)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding]
-+
-.Properties of text_embedding inference
-[%collapsible%open]
-=====
-`embedding_size`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding-size]
-
-`results_field`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
-
-`tokenization`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
-+
-.Properties of tokenization
-[%collapsible%open]
-======
-`bert`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert]
-+
-.Properties of bert
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens]
-=======
-`roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta]
-+
-.Properties of roberta
-[%collapsible%open]
-=======
-`add_prefix_space`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`mpnet`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet]
-+
-.Properties of mpnet
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens]
-=======
-`xlm_roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta]
-+
-.Properties of xlm_roberta
-[%collapsible%open]
-=======
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`bert_ja`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja]
-+
-.Properties of bert_ja
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens]
-=======
-======
-=====
-`text_similarity`::::
-(Object, optional)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity]
-+
-.Properties of text_similarity inference
-[%collapsible%open]
-=====
-`span_score_combination_function`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-span-score-func]
-
-`tokenization`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
-+
-.Properties of tokenization
-[%collapsible%open]
-======
-`bert`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert]
-+
-.Properties of bert
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens]
-=======
-`roberta`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta]
-+
-.Properties of roberta
-[%collapsible%open]
-=======
-`add_prefix_space`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
-
-`truncate`::::
-(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`mpnet`::::
-(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet]
-+
-.Properties of mpnet
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-
-`span`::::
+[%collapsible%open]
+=====
+`num_top_feature_importance_values`::::
(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values]
-`truncate`::::
+`results_field`::::
(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
+=====
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens]
-=======
-`xlm_roberta`::::
+`text_classification`:::
(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-classification]
++
-.Properties of xlm_roberta
+.Properties of text_classification inference
[%collapsible%open]
-=======
-`max_sequence_length`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
+=====
+`classification_labels`::::
+(Optional, string) An array of classification labels.
-`span`::::
+`num_top_classes`::::
(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
+Specifies the number of top class predictions to return. Defaults to all classes
+(-1).
-`truncate`::::
+`results_field`::::
(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`bert_ja`::::
+`tokenization`::::
(Optional, object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
++
-.Properties of bert_ja
-[%collapsible%open]
-=======
-`do_lower_case`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
+Refer to <<tokenization-properties>> to review the properties of the
+`tokenization` object.
+=====
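+
+As an illustration only, a `text_classification` configuration using these
+properties might look like the following sketch (the labels, class count, and
+results field are hypothetical):
+
+[source,js]
+----
+"inference_config": {
+  "text_classification": {
+    "classification_labels": ["negative", "positive"],
+    "num_top_classes": 2,
+    "results_field": "sentiment"
+  }
+}
+----
+// NOTCONSOLE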
-`max_sequence_length`::::
+`text_embedding`:::
+(Optional, object)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding]
++
+.Properties of text_embedding inference
+[%collapsible%open]
+=====
+`embedding_size`::::
(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding-size]
-`span`::::
-(Optional, integer)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
+`results_field`::::
+(Optional, string)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
+
+`tokenization`::::
+(Optional, object)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
++
+Refer to <<tokenization-properties>> to review the properties of the
+`tokenization` object.
+=====
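+
+For instance, a `text_embedding` configuration might be sketched as follows.
+The embedding size is hypothetical and must match what the model produces:
+
+[source,js]
+----
+"inference_config": {
+  "text_embedding": {
+    "embedding_size": 384,
+    "results_field": "embedding"
+  }
+}
+----
+// NOTCONSOLE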
-`truncate`::::
+`text_similarity`:::
+(Optional, object)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity]
++
+.Properties of text_similarity inference
+[%collapsible%open]
+=====
+`span_score_combination_function`::::
(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-span-score-func]
-`with_special_tokens`::::
-(Optional, boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens]
-=======
-======
+`tokenization`::::
+(Optional, object)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
++
+Refer to <<tokenization-properties>> to review the properties of the
+`tokenization` object.
=====
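+
+A `text_similarity` configuration using these properties might be sketched as
+follows; `max` is shown purely for illustration:
+
+[source,js]
+----
+"inference_config": {
+  "text_similarity": {
+    "span_score_combination_function": "max"
+  }
+}
+----
+// NOTCONSOLE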
+
`zero_shot_classification`:::
(Object, optional)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification]
@@ -1477,190 +634,215 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field]
(Optional, object)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization]
+
-.Properties of tokenization
+Refer to <<tokenization-properties>> to review the properties of the
+`tokenization` object.
+=====
+====
+//End of inference_config
+
+//Begin input
+`input`::
+(Required, object)
+The input field names for the model definition.
++
+.Properties of `input`
[%collapsible%open]
-======
-`bert`::::
+====
+`field_names`:::
+(Required, string)
+An array of input field names for the model.
+====
+//End input
+
+// Begin location
+`location`::
+(Optional, object)
+The model definition location. If neither `definition` nor
+`compressed_definition` is specified, `location` is required.
++
+.Properties of `location`
+[%collapsible%open]
+====
+`index`:::
+(Required, object)
+Indicates that the model definition is stored in an index. This object must be
+empty as the index for storing model definitions is configured automatically.
+====
+// End location
+
+`metadata`::
+(Optional, object)
+An object map that contains metadata about the model.
+
+`model_size_bytes`::
+(Optional, integer)
+The estimated memory usage in bytes to keep the trained model in memory. This
+property is supported only if `defer_definition_decompression` is `true` or the
+model definition is not supplied.
+
+`model_type`::
+(Optional, string)
+The created model type. By default the model type is `tree_ensemble`.
+Appropriate types are:
++
+--
+* `tree_ensemble`: The model definition is an ensemble model of decision trees.
+* `lang_ident`: A special type reserved for language identification models.
+* `pytorch`: The stored definition is a PyTorch (specifically a TorchScript) model. Currently only
+NLP models are supported. For more information, refer to {ml-docs}/ml-nlp.html[{nlp-cap}].
+--
+`platform_architecture`::
+(Optional, string)
+If the model only works on one platform because it is heavily optimized for a
+particular processor architecture and OS combination, this field specifies
+which one. The format of the string must match the platform identifiers used
+by Elasticsearch, so it must be one of `linux-x86_64`, `linux-aarch64`,
+`darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. For portable models
+(those that work independently of processor architecture or OS features),
+leave this field unset.
+
+
+`tags`::
+(Optional, string)
+An array of tags to organize the model.
+
+
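+For illustration, the following sketch shows how these top-level fields fit
+together in a single request. The model ID, field name, and tag are
+hypothetical, and other properties (such as `inference_config`) are omitted
+for brevity:
+
+[source,js]
+----
+PUT _ml/trained_models/my-pytorch-model
+{
+  "model_type": "pytorch",
+  "input": {
+    "field_names": ["text_field"]
+  },
+  "location": {
+    "index": {}
+  },
+  "tags": ["hypothetical-example"]
+}
+----
+// NOTCONSOLE
+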
+[[tokenization-properties]]
+=== Properties of `tokenization`
+
+The `tokenization` object has the following properties.
+
+`bert`::
(Optional, object)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert]
+
.Properties of bert
[%collapsible%open]
-=======
-`do_lower_case`::::
+====
+`do_lower_case`:::
(Optional, boolean)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-`max_sequence_length`::::
+`max_sequence_length`:::
(Optional, integer)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-`truncate`::::
+`span`:::
+(Optional, integer)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
+
+`truncate`:::
(Optional, string)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-`with_special_tokens`::::
+`with_special_tokens`:::
(Optional, boolean)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens]
-=======
-`roberta`::::
+====
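+
+For example, a `bert` tokenization object using these properties might be
+sketched as follows (the values are illustrative, not defaults):
+
+[source,js]
+----
+"tokenization": {
+  "bert": {
+    "do_lower_case": true,
+    "max_sequence_length": 512,
+    "truncate": "first",
+    "with_special_tokens": true
+  }
+}
+----
+// NOTCONSOLE
+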
+`roberta`::
(Optional, object)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta]
+
.Properties of roberta
[%collapsible%open]
-=======
-`add_prefix_space`::::
+====
+`add_prefix_space`:::
(Optional, boolean)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space]
-`max_sequence_length`::::
+`max_sequence_length`:::
(Optional, integer)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-`truncate`::::
+`span`:::
+(Optional, integer)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
+
+`truncate`:::
(Optional, string)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-`with_special_tokens`::::
+`with_special_tokens`:::
(Optional, boolean)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`mpnet`::::
+====
+`mpnet`::
(Optional, object)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet]
+
.Properties of mpnet
[%collapsible%open]
-=======
-`do_lower_case`::::
+====
+`do_lower_case`:::
(Optional, boolean)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-`max_sequence_length`::::
+`max_sequence_length`:::
(Optional, integer)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-`truncate`::::
+`span`:::
+(Optional, integer)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
+
+`truncate`:::
(Optional, string)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-`with_special_tokens`::::
+`with_special_tokens`:::
(Optional, boolean)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens]
-=======
-`xlm_roberta`::::
+====
+`xlm_roberta`::
(Optional, object)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta]
+
.Properties of xlm_roberta
[%collapsible%open]
-=======
-`max_sequence_length`::::
+====
+`max_sequence_length`:::
(Optional, integer)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-`truncate`::::
+`span`:::
+(Optional, integer)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
+
+`truncate`:::
(Optional, string)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-`with_special_tokens`::::
+`with_special_tokens`:::
(Optional, boolean)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens]
-=======
-`bert_ja`::::
+====
+`bert_ja`::
(Optional, object)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja]
+
.Properties of bert_ja
[%collapsible%open]
-=======
-`do_lower_case`::::
+====
+`do_lower_case`:::
(Optional, boolean)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case]
-`max_sequence_length`::::
+`max_sequence_length`:::
(Optional, integer)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length]
-`truncate`::::
+`span`:::
+(Optional, integer)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span]
+
+`truncate`:::
(Optional, string)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate]
-`with_special_tokens`::::
+`with_special_tokens`:::
(Optional, boolean)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens]
-=======
-======
-=====
-====
-//End of inference_config
-
-//Begin input
-`input`::
-(Required, object)
-The input field names for the model definition.
-+
-.Properties of `input`
-[%collapsible%open]
-====
-`field_names`:::
-(Required, string)
-An array of input field names for the model.
-====
-//End input
-
-// Begin location
-`location`::
-(Optional, object)
-The model definition location. If the `definition` or `compressed_definition`
-are not specified, the `location` is required.
-+
-.Properties of `location`
-[%collapsible%open]
-====
-`index`:::
-(Required, object)
-Indicates that the model definition is stored in an index. This object must be
-empty as the index for storing model definitions is configured automatically.
====
-// End location
-
-`metadata`::
-(Optional, object)
-An object map that contains metadata about the model.
-
-`model_size_bytes`::
-(Optional, integer)
-The estimated memory usage in bytes to keep the trained model in memory. This
-property is supported only if `defer_definition_decompression` is `true` or the
-model definition is not supplied.
-
-`model_type`::
-(Optional, string)
-The created model type. By default the model type is `tree_ensemble`.
-Appropriate types are:
-+
---
-* `tree_ensemble`: The model definition is an ensemble model of decision trees.
-* `lang_ident`: A special type reserved for language identification models.
-* `pytorch`: The stored definition is a PyTorch (specifically a TorchScript) model. Currently only
-NLP models are supported. For more information, refer to {ml-docs}/ml-nlp.html[{nlp-cap}].
---
-`platform_architecture`::
-(Optional, string)
-If the model only works on one platform, because it is heavily
-optimized for a particular processor architecture and OS combination,
-then this field specifies which. The format of the string must match
-the platform identifiers used by Elasticsearch, so one of, `linux-x86_64`,
-`linux-aarch64`, `darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`.
-For portable models (those that work independent of processor architecture or
-OS features), leave this field unset.
-
-
-`tags`::
-(Optional, string)
-An array of tags to organize the model.
[[ml-put-trained-models-example]]
diff --git a/docs/reference/release-notes/8.11.0.asciidoc b/docs/reference/release-notes/8.11.0.asciidoc
index 08ddaf5667845..16ff5edd6d91a 100644
--- a/docs/reference/release-notes/8.11.0.asciidoc
+++ b/docs/reference/release-notes/8.11.0.asciidoc
@@ -297,8 +297,8 @@ Transform::
* Add accessors required to recreate `TransformStats` object from the fields {es-pull}98844[#98844]
Vector Search::
-* Add new max_inner_product vector similarity function {es-pull}99445[#99445]
-* Adds `nested` support for indexed `dense_vector` fields {es-pull}99532[#99532]
+* Add new max_inner_product vector similarity function {es-pull}99527[#99527]
+* Adds `nested` support for indexed `dense_vector` fields {es-pull}99763[#99763]
* Dense vector field types are indexed by default {es-pull}98268[#98268]
* Increase the max vector dims to 4096 {es-pull}99682[#99682]
diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml
index 94ed94df43818..e8d94ce624dbb 100644
--- a/gradle/build.versions.toml
+++ b/gradle/build.versions.toml
@@ -17,6 +17,7 @@ commons-codec = "commons-codec:commons-codec:1.11"
commmons-io = "commons-io:commons-io:2.2"
docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5"
forbiddenApis = "de.thetaphi:forbiddenapis:3.6"
+gradle-enterprise = "com.gradle:gradle-enterprise-gradle-plugin:3.14.1"
hamcrest = "org.hamcrest:hamcrest:2.1"
httpcore = "org.apache.httpcomponents:httpcore:4.4.12"
httpclient = "org.apache.httpcomponents:httpclient:4.5.10"
diff --git a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java
index f71a55f4f6be0..23e5fcd312dcc 100644
--- a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java
+++ b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java
@@ -10,7 +10,6 @@
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
@@ -37,6 +36,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
@@ -44,6 +44,8 @@
import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketScript;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
@@ -77,28 +79,30 @@ public void testBasic() throws Exception {
createIndex("test");
ensureGreen("test");
client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get();
- SearchResponse rsp = buildRequest("doc['foo'] + 1").get();
- assertEquals(1, rsp.getHits().getTotalHits().value);
- assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ assertResponse(buildRequest("doc['foo'] + 1"), rsp -> {
+ assertEquals(1, rsp.getHits().getTotalHits().value);
+ assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ });
}
public void testFunction() throws Exception {
createIndex("test");
ensureGreen("test");
client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get();
- SearchResponse rsp = buildRequest("doc['foo'] + abs(1)").get();
- assertNoFailures(rsp);
- assertEquals(1, rsp.getHits().getTotalHits().value);
- assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['foo'] + abs(1)"), rsp -> {
+ assertEquals(1, rsp.getHits().getTotalHits().value);
+ assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ });
}
public void testBasicUsingDotValue() throws Exception {
createIndex("test");
ensureGreen("test");
client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get();
- SearchResponse rsp = buildRequest("doc['foo'].value + 1").get();
- assertEquals(1, rsp.getHits().getTotalHits().value);
- assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ assertResponse(buildRequest("doc['foo'].value + 1"), rsp -> {
+ assertEquals(1, rsp.getHits().getTotalHits().value);
+ assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ });
}
public void testScore() throws Exception {
@@ -116,13 +120,14 @@ public void testScore() throws Exception {
SearchRequestBuilder req = prepareSearch().setIndices("test");
req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE));
req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent
- SearchResponse rsp = req.get();
- assertNoFailures(rsp);
- SearchHits hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals("1", hits.getAt(0).getId());
- assertEquals("3", hits.getAt(1).getId());
- assertEquals("2", hits.getAt(2).getId());
+ assertResponse(req, rsp -> {
+ assertNoFailures(rsp);
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals("1", hits.getAt(0).getId());
+ assertEquals("3", hits.getAt(1).getId());
+ assertEquals("2", hits.getAt(2).getId());
+ });
req = prepareSearch().setIndices("test");
req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE));
@@ -140,26 +145,30 @@ public void testDateMethods() throws Exception {
client().prepareIndex("test").setId("1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"),
client().prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z")
);
- SearchResponse rsp = buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()").get();
- assertEquals(2, rsp.getHits().getTotalHits().value);
- SearchHits hits = rsp.getHits();
- assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- rsp = buildRequest("doc['date0'].getHourOfDay() + doc['date1'].getDayOfMonth()").get();
- assertEquals(2, rsp.getHits().getTotalHits().value);
- hits = rsp.getHits();
- assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- rsp = buildRequest("doc['date1'].getMonth() + 1").get();
- assertEquals(2, rsp.getHits().getTotalHits().value);
- hits = rsp.getHits();
- assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(10.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- rsp = buildRequest("doc['date1'].getYear()").get();
- assertEquals(2, rsp.getHits().getTotalHits().value);
- hits = rsp.getHits();
- assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertResponse(buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()"), rsp -> {
+ assertEquals(2, rsp.getHits().getTotalHits().value);
+ SearchHits hits = rsp.getHits();
+ assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ });
+ assertResponse(buildRequest("doc['date0'].getHourOfDay() + doc['date1'].getDayOfMonth()"), rsp -> {
+ assertEquals(2, rsp.getHits().getTotalHits().value);
+ SearchHits hits = rsp.getHits();
+ assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ });
+ assertResponse(buildRequest("doc['date1'].getMonth() + 1"), rsp -> {
+ assertEquals(2, rsp.getHits().getTotalHits().value);
+ SearchHits hits = rsp.getHits();
+ assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(10.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ });
+ assertResponse(buildRequest("doc['date1'].getYear()"), rsp -> {
+ assertEquals(2, rsp.getHits().getTotalHits().value);
+ SearchHits hits = rsp.getHits();
+ assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ });
}
public void testDateObjectMethods() throws Exception {
@@ -170,26 +179,30 @@ public void testDateObjectMethods() throws Exception {
client().prepareIndex("test").setId("1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"),
client().prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z")
);
- SearchResponse rsp = buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour").get();
- assertEquals(2, rsp.getHits().getTotalHits().value);
- SearchHits hits = rsp.getHits();
- assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- rsp = buildRequest("doc['date0'].date.getHourOfDay() + doc['date1'].date.dayOfMonth").get();
- assertEquals(2, rsp.getHits().getTotalHits().value);
- hits = rsp.getHits();
- assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- rsp = buildRequest("doc['date1'].date.monthOfYear + 1").get();
- assertEquals(2, rsp.getHits().getTotalHits().value);
- hits = rsp.getHits();
- assertEquals(10.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- rsp = buildRequest("doc['date1'].date.year").get();
- assertEquals(2, rsp.getHits().getTotalHits().value);
- hits = rsp.getHits();
- assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertResponse(buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour"), rsp -> {
+ assertEquals(2, rsp.getHits().getTotalHits().value);
+ SearchHits hits = rsp.getHits();
+ assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ });
+ assertResponse(buildRequest("doc['date0'].date.getHourOfDay() + doc['date1'].date.dayOfMonth"), rsp -> {
+ assertEquals(2, rsp.getHits().getTotalHits().value);
+ SearchHits hits = rsp.getHits();
+ assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ });
+ assertResponse(buildRequest("doc['date1'].date.monthOfYear + 1"), rsp -> {
+ assertEquals(2, rsp.getHits().getTotalHits().value);
+ SearchHits hits = rsp.getHits();
+ assertEquals(10.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ });
+ assertResponse(buildRequest("doc['date1'].date.year"), rsp -> {
+ assertEquals(2, rsp.getHits().getTotalHits().value);
+ SearchHits hits = rsp.getHits();
+ assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ });
}
public void testMultiValueMethods() throws Exception {
@@ -221,79 +234,79 @@ public void testMultiValueMethods() throws Exception {
client().prepareIndex("test").setId("3").setSource(doc3)
);
- SearchResponse rsp = buildRequest("doc['double0'].count() + doc['double1'].count()").get();
- assertNoFailures(rsp);
- SearchHits hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(2.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
-
- rsp = buildRequest("doc['double0'].sum()").get();
- assertNoFailures(rsp);
- hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D);
-
- rsp = buildRequest("doc['double0'].avg() + doc['double1'].avg()").get();
- assertNoFailures(rsp);
- hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(8.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D);
-
- rsp = buildRequest("doc['double0'].median()").get();
- assertNoFailures(rsp);
- hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D);
-
- rsp = buildRequest("doc['double0'].min()").get();
- assertNoFailures(rsp);
- hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D);
-
- rsp = buildRequest("doc['double0'].max()").get();
- assertNoFailures(rsp);
- hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
-
- rsp = buildRequest("doc['double0'].sum()/doc['double0'].count()").get();
- assertNoFailures(rsp);
- hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['double0'].count() + doc['double1'].count()"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(2.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
+
+ assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
+
+ assertNoFailuresAndResponse(buildRequest("doc['double0'].avg() + doc['double1'].avg()"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(8.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
+
+ assertNoFailuresAndResponse(buildRequest("doc['double0'].median()"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
+
+ assertNoFailuresAndResponse(buildRequest("doc['double0'].min()"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
+
+ assertNoFailuresAndResponse(buildRequest("doc['double0'].max()"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
+
+ assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()/doc['double0'].count()"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
// make sure count() works for missing
- rsp = buildRequest("doc['double2'].count()").get();
- assertNoFailures(rsp);
- hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['double2'].count()"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
// make sure .empty works in the same way
- rsp = buildRequest("doc['double2'].empty ? 5.0 : 2.0").get();
- assertNoFailures(rsp);
- hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['double2'].empty ? 5.0 : 2.0"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
}
public void testInvalidDateMethodCall() throws Exception {
@@ -325,12 +338,12 @@ public void testSparseField() throws Exception {
client().prepareIndex("test").setId("1").setSource("id", 1, "x", 4),
client().prepareIndex("test").setId("2").setSource("id", 2, "y", 2)
);
- SearchResponse rsp = buildRequest("doc['x'] + 1").get();
- assertNoFailures(rsp);
- SearchHits hits = rsp.getHits();
- assertEquals(2, rsp.getHits().getTotalHits().value);
- assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(1.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['x'] + 1"), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(2, rsp.getHits().getTotalHits().value);
+ assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(1.0, hits.getAt(1).field("foo").getValue(), 0.0D);
+ });
}
public void testMissingField() throws Exception {
@@ -361,12 +374,13 @@ public void testParams() throws Exception {
);
// a = int, b = double, c = long
String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 1 : 0)";
- SearchResponse rsp = buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L).get();
- SearchHits hits = rsp.getHits();
- assertEquals(3, hits.getTotalHits().value);
- assertEquals(24.5, hits.getAt(0).field("foo").getValue(), 0.0D);
- assertEquals(9.5, hits.getAt(1).field("foo").getValue(), 0.0D);
- assertEquals(13.5, hits.getAt(2).field("foo").getValue(), 0.0D);
+ assertResponse(buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L), rsp -> {
+ SearchHits hits = rsp.getHits();
+ assertEquals(3, hits.getTotalHits().value);
+ assertEquals(24.5, hits.getAt(0).field("foo").getValue(), 0.0D);
+ assertEquals(9.5, hits.getAt(1).field("foo").getValue(), 0.0D);
+ assertEquals(13.5, hits.getAt(2).field("foo").getValue(), 0.0D);
+ });
}
public void testCompileFailure() {
@@ -484,21 +498,22 @@ public void testSpecialValueVariable() throws Exception {
.script(new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "3.0", Collections.emptyMap()))
);
- SearchResponse rsp = req.get();
- assertEquals(3, rsp.getHits().getTotalHits().value);
+ assertResponse(req, rsp -> {
+ assertEquals(3, rsp.getHits().getTotalHits().value);
- Stats stats = rsp.getAggregations().get("int_agg");
- assertEquals(39.0, stats.getMax(), 0.0001);
- assertEquals(15.0, stats.getMin(), 0.0001);
+ Stats stats = rsp.getAggregations().get("int_agg");
+ assertEquals(39.0, stats.getMax(), 0.0001);
+ assertEquals(15.0, stats.getMin(), 0.0001);
- stats = rsp.getAggregations().get("double_agg");
- assertEquals(0.7, stats.getMax(), 0.0001);
- assertEquals(0.1, stats.getMin(), 0.0001);
+ stats = rsp.getAggregations().get("double_agg");
+ assertEquals(0.7, stats.getMax(), 0.0001);
+ assertEquals(0.1, stats.getMin(), 0.0001);
- stats = rsp.getAggregations().get("const_agg");
- assertThat(stats.getMax(), equalTo(3.0));
- assertThat(stats.getMin(), equalTo(3.0));
- assertThat(stats.getAvg(), equalTo(3.0));
+ stats = rsp.getAggregations().get("const_agg");
+ assertThat(stats.getMax(), equalTo(3.0));
+ assertThat(stats.getMin(), equalTo(3.0));
+ assertThat(stats.getAvg(), equalTo(3.0));
+ });
}
public void testStringSpecialValueVariable() throws Exception {
@@ -520,18 +535,19 @@ public void testStringSpecialValueVariable() throws Exception {
.script(new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "_value", Collections.emptyMap()))
);
- String message;
+ AtomicReference<String> message = new AtomicReference<>();
try {
// shards that don't have docs with the "text" field will not fail,
// so we may or may not get a total failure
- SearchResponse rsp = req.get();
- assertThat(rsp.getShardFailures().length, greaterThan(0)); // at least the shards containing the docs should have failed
- message = rsp.getShardFailures()[0].reason();
+ assertResponse(req, rsp -> {
+ assertThat(rsp.getShardFailures().length, greaterThan(0)); // at least the shards containing the docs should have failed
+ message.set(rsp.getShardFailures()[0].reason());
+ });
} catch (SearchPhaseExecutionException e) {
- message = e.toString();
+ message.set(e.toString());
}
- assertThat(message + "should have contained ScriptException", message.contains("ScriptException"), equalTo(true));
- assertThat(message + "should have contained text variable error", message.contains("text variable"), equalTo(true));
+ assertThat(message + " should have contained ScriptException", message.get().contains("ScriptException"), equalTo(true));
+ assertThat(message + " should have contained text variable error", message.get().contains("text variable"), equalTo(true));
}
// test to make sure expressions are not allowed to be used as update scripts
@@ -565,44 +581,52 @@ public void testPipelineAggregationScript() throws Exception {
client().prepareIndex("agg_index").setId("4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0),
client().prepareIndex("agg_index").setId("5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0)
);
- SearchResponse response = prepareSearch("agg_index").addAggregation(
- histogram("histogram").field("one")
- .interval(2)
- .subAggregation(sum("twoSum").field("two"))
- .subAggregation(sum("threeSum").field("three"))
- .subAggregation(sum("fourSum").field("four"))
- .subAggregation(
- bucketScript(
- "totalSum",
- new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()),
- "twoSum",
- "threeSum",
- "fourSum"
+ assertResponse(
+ prepareSearch("agg_index").addAggregation(
+ histogram("histogram").field("one")
+ .interval(2)
+ .subAggregation(sum("twoSum").field("two"))
+ .subAggregation(sum("threeSum").field("three"))
+ .subAggregation(sum("fourSum").field("four"))
+ .subAggregation(
+ bucketScript(
+ "totalSum",
+ new Script(
+ ScriptType.INLINE,
+ ExpressionScriptEngine.NAME,
+ "_value0 + _value1 + _value2",
+ Collections.emptyMap()
+ ),
+ "twoSum",
+ "threeSum",
+ "fourSum"
+ )
)
- )
- ).execute().actionGet();
-
- Histogram histogram = response.getAggregations().get("histogram");
- assertThat(histogram, notNullValue());
- assertThat(histogram.getName(), equalTo("histogram"));
- List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
-
- for (int bucketCount = 0; bucketCount < buckets.size(); ++bucketCount) {
- Histogram.Bucket bucket = buckets.get(bucketCount);
- if (bucket.getDocCount() == 1) {
- SimpleValue seriesArithmetic = bucket.getAggregations().get("totalSum");
- assertThat(seriesArithmetic, notNullValue());
- double seriesArithmeticValue = seriesArithmetic.value();
- assertEquals(9.0, seriesArithmeticValue, 0.001);
- } else if (bucket.getDocCount() == 2) {
- SimpleValue seriesArithmetic = bucket.getAggregations().get("totalSum");
- assertThat(seriesArithmetic, notNullValue());
- double seriesArithmeticValue = seriesArithmetic.value();
- assertEquals(18.0, seriesArithmeticValue, 0.001);
- } else {
- fail("Incorrect number of documents in a bucket in the histogram.");
+ ),
+ response -> {
+ Histogram histogram = response.getAggregations().get("histogram");
+ assertThat(histogram, notNullValue());
+ assertThat(histogram.getName(), equalTo("histogram"));
+ List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
+
+ for (int bucketCount = 0; bucketCount < buckets.size(); ++bucketCount) {
+ Histogram.Bucket bucket = buckets.get(bucketCount);
+ if (bucket.getDocCount() == 1) {
+ SimpleValue seriesArithmetic = bucket.getAggregations().get("totalSum");
+ assertThat(seriesArithmetic, notNullValue());
+ double seriesArithmeticValue = seriesArithmetic.value();
+ assertEquals(9.0, seriesArithmeticValue, 0.001);
+ } else if (bucket.getDocCount() == 2) {
+ SimpleValue seriesArithmetic = bucket.getAggregations().get("totalSum");
+ assertThat(seriesArithmetic, notNullValue());
+ double seriesArithmeticValue = seriesArithmetic.value();
+ assertEquals(18.0, seriesArithmeticValue, 0.001);
+ } else {
+ fail("Incorrect number of documents in a bucket in the histogram.");
+ }
+ }
}
- }
+ );
}
public void testGeo() throws Exception {
@@ -630,25 +654,25 @@ public void testGeo() throws Exception {
.actionGet();
refresh();
// access .lat
- SearchResponse rsp = buildRequest("doc['location'].lat").get();
- assertNoFailures(rsp);
- assertEquals(1, rsp.getHits().getTotalHits().value);
- assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['location'].lat"), rsp -> {
+ assertEquals(1, rsp.getHits().getTotalHits().value);
+ assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
+ });
// access .lon
- rsp = buildRequest("doc['location'].lon").get();
- assertNoFailures(rsp);
- assertEquals(1, rsp.getHits().getTotalHits().value);
- assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['location'].lon"), rsp -> {
+ assertEquals(1, rsp.getHits().getTotalHits().value);
+ assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
+ });
// access .empty
- rsp = buildRequest("doc['location'].empty ? 1 : 0").get();
- assertNoFailures(rsp);
- assertEquals(1, rsp.getHits().getTotalHits().value);
- assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['location'].empty ? 1 : 0"), rsp -> {
+ assertEquals(1, rsp.getHits().getTotalHits().value);
+ assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
+ });
// call haversin
- rsp = buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)").get();
- assertNoFailures(rsp);
- assertEquals(1, rsp.getHits().getTotalHits().value);
- assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D);
+ assertNoFailuresAndResponse(buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)"), rsp -> {
+ assertEquals(1, rsp.getHits().getTotalHits().value);
+ assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D);
+ });
}
public void testBoolean() throws Exception {
@@ -668,27 +692,27 @@ public void testBoolean() throws Exception {
client().prepareIndex("test").setId("3").setSource("id", 3, "price", 2.0, "vip", false)
);
// access .value
- SearchResponse rsp = buildRequest("doc['vip'].value").get();
- assertNoFailures(rsp);
- assertEquals(3, rsp.getHits().getTotalHits().value);
- assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
- assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
- assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['vip'].value"), rsp -> {
+ assertEquals(3, rsp.getHits().getTotalHits().value);
+ assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
+ assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
+ assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
+ });
// access .empty
- rsp = buildRequest("doc['vip'].empty ? 1 : 0").get();
- assertNoFailures(rsp);
- assertEquals(3, rsp.getHits().getTotalHits().value);
- assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
- assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
- assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['vip'].empty ? 1 : 0"), rsp -> {
+ assertEquals(3, rsp.getHits().getTotalHits().value);
+ assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
+ assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
+ assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
+ });
// ternary operator
// vip's have a 50% discount
- rsp = buildRequest("doc['vip'] ? doc['price']/2 : doc['price']").get();
- assertNoFailures(rsp);
- assertEquals(3, rsp.getHits().getTotalHits().value);
- assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
- assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
- assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
+ assertNoFailuresAndResponse(buildRequest("doc['vip'] ? doc['price']/2 : doc['price']"), rsp -> {
+ assertEquals(3, rsp.getHits().getTotalHits().value);
+ assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
+ assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
+ assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
+ });
}
public void testFilterScript() throws Exception {
@@ -702,9 +726,9 @@ public void testFilterScript() throws Exception {
SearchRequestBuilder builder = buildRequest("doc['foo'].value");
Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap());
builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script)));
- SearchResponse rsp = builder.get();
- assertNoFailures(rsp);
- assertEquals(1, rsp.getHits().getTotalHits().value);
- assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ assertNoFailuresAndResponse(builder, rsp -> {
+ assertEquals(1, rsp.getHits().getTotalHits().value);
+ assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ });
}
}
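The hunks above all apply one pattern: instead of holding a `SearchResponse` from `builder.get()` and asserting on it, the assertions move into a consumer passed to `ElasticsearchAssertions.assertResponse`, which releases the ref-counted response when the consumer returns. A minimal sketch of such a helper, with the class name and placement assumed rather than copied from the framework:

```java
import java.util.function.Consumer;

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

// Sketch only: the real helper lives in the test framework; this restates its shape.
public final class ResponseAssertions {
    public static void assertResponse(SearchRequestBuilder builder, Consumer<SearchResponse> consumer) {
        SearchResponse response = builder.get();   // execute the search
        try {
            consumer.accept(response);             // run the caller's assertions
        } finally {
            response.decRef();                     // release the response even if an assertion throws
        }
    }
}
```

`assertNoFailuresAndResponse` has the same shape, with an `assertNoFailures(response)` check running before the consumer.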
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java
index d859fb509e915..4b0c365ba8b13 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java
@@ -8,6 +8,9 @@
package org.elasticsearch.script.mustache;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
@@ -31,6 +34,8 @@
public class TransportMultiSearchTemplateAction extends HandledTransportAction<MultiSearchTemplateRequest, MultiSearchTemplateResponse> {
+ private static final Logger logger = LogManager.getLogger(TransportMultiSearchTemplateAction.class);
+
private final ScriptService scriptService;
private final NamedXContentRegistry xContentRegistry;
private final NodeClient client;
@@ -76,6 +81,9 @@ protected void doExecute(Task task, MultiSearchTemplateRequest request, ActionLi
searchRequest = convert(searchTemplateRequest, searchTemplateResponse, scriptService, xContentRegistry, searchUsageHolder);
} catch (Exception e) {
items[i] = new MultiSearchTemplateResponse.Item(null, e);
+ if (ExceptionsHelper.status(e).getStatus() >= 500 && ExceptionsHelper.isNodeOrShardUnavailableTypeException(e) == false) {
+ logger.warn("MultiSearchTemplate convert failure", e);
+ }
continue;
}
items[i] = new MultiSearchTemplateResponse.Item(searchTemplateResponse, null);
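The new guard keeps the multi-search-template path quiet for expected failures: only 5xx-class errors that are not routine node or shard unavailability get logged. Restated as a standalone predicate (a hypothetical helper, not part of the patch):

```java
import org.elasticsearch.ExceptionsHelper;

// Hypothetical restatement of the guard above: warn only for unexpected
// server-side failures, never for 4xx client errors or shard-unavailable noise.
final class ConvertFailureLogging {
    static boolean worthWarning(Exception e) {
        return ExceptionsHelper.status(e).getStatus() >= 500
            && ExceptionsHelper.isNodeOrShardUnavailableTypeException(e) == false;
    }
}
```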
diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java
index 34ead2c21480b..cc9a3a1a248db 100644
--- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java
+++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java
@@ -806,8 +806,10 @@ public void testHasChildInnerHitsHighlighting() throws Exception {
assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
SearchHit[] searchHits = response.getHits().getHits()[0].getInnerHits().get("child").getHits();
assertThat(searchHits.length, equalTo(1));
- assertThat(searchHits[0].getHighlightFields().get("c_field").getFragments().length, equalTo(1));
- assertThat(searchHits[0].getHighlightFields().get("c_field").getFragments()[0].string(), equalTo("<em>foo</em> bar "));
+ HighlightField highlightField1 = searchHits[0].getHighlightFields().get("c_field");
+ assertThat(highlightField1.fragments().length, equalTo(1));
+ HighlightField highlightField = searchHits[0].getHighlightFields().get("c_field");
+ assertThat(highlightField.fragments()[0].string(), equalTo("<em>foo</em> bar "));
}
);
}
@@ -1786,7 +1788,7 @@ public void testHighlightersIgnoreParentChild() throws IOException {
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("parent-id"));
HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("searchText");
- assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown <em>fox</em> "));
+ assertThat(highlightField.fragments()[0].string(), equalTo("quick brown <em>fox</em> "));
}
);
@@ -1799,7 +1801,7 @@ public void testHighlightersIgnoreParentChild() throws IOException {
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("child-id"));
HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("searchText");
- assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown <em>fox</em> "));
+ assertThat(highlightField.fragments()[0].string(), equalTo("quick brown <em>fox</em> "));
}
);
}
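Besides splitting the chained calls, these hunks migrate from `HighlightField#getFragments()` to the `fragments()` accessor. An invented helper showing the renamed call in isolation:

```java
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;

// Invented helper: extract the first highlight fragment for a field.
final class Highlights {
    static String firstFragment(SearchHit hit, String fieldName) {
        HighlightField field = hit.getHighlightFields().get(fieldName);
        Text[] fragments = field.fragments();  // accessor-style name replacing getFragments()
        return fragments[0].string();
    }
}
```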
diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java
index 39a84f2d16d7f..02eaacba0b1de 100644
--- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java
+++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java
@@ -26,6 +26,7 @@
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
+import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
@@ -183,10 +184,8 @@ public void testSimpleParentChild() throws Exception {
response -> {
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.getHits().length, equalTo(1));
- assertThat(
- innerHits.getAt(0).getHighlightFields().get("message").getFragments()[0].string(),
- equalTo("fox eat quick")
- );
+ HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("message");
+ assertThat(highlightField.fragments()[0].string(), equalTo("<em>fox</em> eat quick"));
assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(message:fox"));
assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("fox eat quick"));
assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RequestRetryStats.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RequestRetryStats.java
index ae2441c2e705d..b7c37c6d95fde 100644
--- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RequestRetryStats.java
+++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RequestRetryStats.java
@@ -24,6 +24,7 @@
* This class emit aws s3 metrics as logs until we have a proper apm integration
*/
public class S3RequestRetryStats {
+ public static final String MESSAGE_FIELD = "message";
private static final Logger logger = LogManager.getLogger(S3RequestRetryStats.class);
@@ -65,7 +66,8 @@ private static long getCounter(TimingInfo info, AWSRequestMetrics.Field field) {
public void emitMetrics() {
if (logger.isDebugEnabled()) {
- var metrics = Maps.newMapWithExpectedSize(3);
+ var metrics = Maps.newMapWithExpectedSize(4);
+ metrics.put(MESSAGE_FIELD, "S3 Request Retry Stats");
metrics.put("elasticsearch.metrics.s3.requests", requests.get());
metrics.put("elasticsearch.metrics.s3.exceptions", exceptions.get());
metrics.put("elasticsearch.metrics.s3.throttles", throttles.get());
diff --git a/plugins/analysis-icu/src/internalClusterTest/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java b/plugins/analysis-icu/src/internalClusterTest/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java
index e6f91efad0162..ae6a0cc71789f 100644
--- a/plugins/analysis-icu/src/internalClusterTest/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java
+++ b/plugins/analysis-icu/src/internalClusterTest/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java
@@ -12,7 +12,6 @@
import com.ibm.icu.util.ULocale;
import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin;
import org.elasticsearch.plugins.Plugin;
@@ -31,6 +30,7 @@
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
@@ -82,10 +82,11 @@ public void testBasicUsage() throws Exception {
.sort("id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 2L);
- assertOrderedSearchHits(response, "2", "1");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 2L);
+ assertOrderedSearchHits(response, "2", "1");
+ });
}
public void testMultipleValues() throws Exception {
@@ -126,10 +127,11 @@ public void testMultipleValues() throws Exception {
.sort("id", SortOrder.DESC) // will be ignored
);
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 2L);
- assertOrderedSearchHits(response, "1", "2");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 2L);
+ assertOrderedSearchHits(response, "1", "2");
+ });
// same thing, using different sort mode that will use a for both docs
request = new SearchRequest().indices(index)
@@ -141,10 +143,11 @@ public void testMultipleValues() throws Exception {
.sort("id", SortOrder.DESC) // will NOT be ignored and will determine order
);
- response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 2L);
- assertOrderedSearchHits(response, "2", "1");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 2L);
+ assertOrderedSearchHits(response, "2", "1");
+ });
}
/*
@@ -186,10 +189,11 @@ public void testNormalization() throws Exception {
.sort("id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 2L);
- assertOrderedSearchHits(response, "2", "1");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 2L);
+ assertOrderedSearchHits(response, "2", "1");
+ });
}
/*
@@ -230,10 +234,11 @@ public void testSecondaryStrength() throws Exception {
.sort("id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 2L);
- assertOrderedSearchHits(response, "2", "1");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 2L);
+ assertOrderedSearchHits(response, "2", "1");
+ });
}
/*
@@ -275,10 +280,11 @@ public void testIgnorePunctuation() throws Exception {
.sort("id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 2L);
- assertOrderedSearchHits(response, "2", "1");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 2L);
+ assertOrderedSearchHits(response, "2", "1");
+ });
}
/*
@@ -321,10 +327,11 @@ public void testIgnoreWhitespace() throws Exception {
.sort("id", SortOrder.ASC)
);
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 3L);
- assertOrderedSearchHits(response, "3", "1", "2");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 3L);
+ assertOrderedSearchHits(response, "3", "1", "2");
+ });
}
/*
@@ -354,10 +361,11 @@ public void testNumerics() throws Exception {
SearchRequest request = new SearchRequest().indices(index)
.source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC));
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 2L);
- assertOrderedSearchHits(response, "2", "1");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 2L);
+ assertOrderedSearchHits(response, "2", "1");
+ });
}
/*
@@ -393,10 +401,11 @@ public void testIgnoreAccentsButNotCase() throws Exception {
SearchRequest request = new SearchRequest().indices(index)
.source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC).sort("id", SortOrder.DESC));
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 4L);
- assertOrderedSearchHits(response, "3", "1", "4", "2");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 4L);
+ assertOrderedSearchHits(response, "3", "1", "4", "2");
+ });
}
/*
@@ -429,10 +438,11 @@ public void testUpperCaseFirst() throws Exception {
SearchRequest request = new SearchRequest().indices(index)
.source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC));
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 2L);
- assertOrderedSearchHits(response, "2", "1");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 2L);
+ assertOrderedSearchHits(response, "2", "1");
+ });
}
/*
@@ -482,9 +492,10 @@ public void testCustomRules() throws Exception {
.sort("id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
- SearchResponse response = client().search(request).actionGet();
- assertNoFailures(response);
- assertHitCount(response, 2L);
- assertOrderedSearchHits(response, "2", "1");
+ assertResponse(client().search(request), response -> {
+ assertNoFailures(response);
+ assertHitCount(response, 2L);
+ assertOrderedSearchHits(response, "2", "1");
+ });
}
}
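This file builds raw `SearchRequest` objects, so the tests hand `client().search(request)`, an `ActionFuture`, to `assertResponse` instead of a request builder. A sketch of what such an overload presumably looks like:

```java
import java.util.function.Consumer;

import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.search.SearchResponse;

// Assumed shape of the ActionFuture overload; not the framework's exact code.
final class FutureAssertions {
    static void assertResponse(ActionFuture<SearchResponse> future, Consumer<SearchResponse> consumer) {
        SearchResponse response = future.actionGet();  // block until the search completes
        try {
            consumer.accept(response);
        } finally {
            response.decRef();
        }
    }
}
```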
diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighter.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighter.java
index 45c2a9208b8d6..9b7c6afbb9f10 100644
--- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighter.java
+++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighter.java
@@ -56,7 +56,7 @@ protected Analyzer wrapAnalyzer(Analyzer analyzer, Integer maxAnalyzedOffset) {
}
@Override
- protected PassageFormatter getPassageFormatter(HitContext hitContext, SearchHighlightContext.Field field, Encoder encoder) {
+ protected PassageFormatter getPassageFormatter(SearchHighlightContext.Field field, Encoder encoder) {
return new AnnotatedPassageFormatter(encoder);
}
diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java
index e92c7ca4bdebb..026dabd64eb0b 100644
--- a/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java
+++ b/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java
@@ -9,7 +9,6 @@
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.get.GetResponse;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.plugin.mapper.MapperSizePlugin;
import org.elasticsearch.plugins.Plugin;
@@ -24,6 +23,7 @@
import java.util.Map;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
@@ -110,43 +110,64 @@ public void testGetWithFields() throws Exception {
assertAcked(prepareCreate("test").setMapping("_size", "enabled=true"));
final String source = "{\"f\":\"" + randomAlphaOfLengthBetween(1, 100) + "\"}";
indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON));
- SearchResponse searchResponse = prepareSearch("test").addFetchField("_size").get();
- assertEquals(source.length(), ((Long) searchResponse.getHits().getHits()[0].getFields().get("_size").getValue()).intValue());
+ assertResponse(
+ prepareSearch("test").addFetchField("_size"),
+ response -> assertEquals(
+ source.length(),
+ ((Long) response.getHits().getHits()[0].getFields().get("_size").getValue()).intValue()
+ )
+ );
// this should not work when requesting fields via wildcard expression
- searchResponse = prepareSearch("test").addFetchField("*").get();
- assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
+ assertResponse(
+ prepareSearch("test").addFetchField("*"),
+ response -> assertNull(response.getHits().getHits()[0].getFields().get("_size"))
+ );
// This should STILL work
- searchResponse = prepareSearch("test").addStoredField("*").get();
- assertNotNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
+ assertResponse(
+ prepareSearch("test").addStoredField("*"),
+ response -> assertNotNull(response.getHits().getHits()[0].getFields().get("_size"))
+ );
}
public void testWildCardWithFieldsWhenDisabled() throws Exception {
assertAcked(prepareCreate("test").setMapping("_size", "enabled=false"));
final String source = "{\"f\":\"" + randomAlphaOfLengthBetween(1, 100) + "\"}";
indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON));
- SearchResponse searchResponse = prepareSearch("test").addFetchField("_size").get();
- assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
+ assertResponse(
+ prepareSearch("test").addFetchField("_size"),
+ response -> assertNull(response.getHits().getHits()[0].getFields().get("_size"))
+ );
- searchResponse = prepareSearch("test").addFetchField("*").get();
- assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
+ assertResponse(
+ prepareSearch("test").addFetchField("*"),
+ response -> assertNull(response.getHits().getHits()[0].getFields().get("_size"))
+ );
- searchResponse = prepareSearch("test").addStoredField("*").get();
- assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
+ assertResponse(
+ prepareSearch("test").addStoredField("*"),
+ response -> assertNull(response.getHits().getHits()[0].getFields().get("_size"))
+ );
}
public void testWildCardWithFieldsWhenNotProvided() throws Exception {
assertAcked(prepareCreate("test"));
final String source = "{\"f\":\"" + randomAlphaOfLengthBetween(1, 100) + "\"}";
indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON));
- SearchResponse searchResponse = prepareSearch("test").addFetchField("_size").get();
- assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
+ assertResponse(
+ prepareSearch("test").addFetchField("_size"),
+ response -> assertNull(response.getHits().getHits()[0].getFields().get("_size"))
+ );
- searchResponse = prepareSearch("test").addFetchField("*").get();
- assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
+ assertResponse(
+ prepareSearch("test").addFetchField("*"),
+ response -> assertNull(response.getHits().getHits()[0].getFields().get("_size"))
+ );
- searchResponse = prepareSearch("test").addStoredField("*").get();
- assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
+ assertResponse(
+ prepareSearch("test").addStoredField("*"),
+ response -> assertNull(response.getHits().getHits()[0].getFields().get("_size"))
+ );
}
}
diff --git a/plugins/store-smb/src/internalClusterTest/java/org/elasticsearch/index/store/smb/AbstractAzureFsTestCase.java b/plugins/store-smb/src/internalClusterTest/java/org/elasticsearch/index/store/smb/AbstractAzureFsTestCase.java
index 4d1f6426821c4..4a35779a42166 100644
--- a/plugins/store-smb/src/internalClusterTest/java/org/elasticsearch/index/store/smb/AbstractAzureFsTestCase.java
+++ b/plugins/store-smb/src/internalClusterTest/java/org/elasticsearch/index/store/smb/AbstractAzureFsTestCase.java
@@ -8,7 +8,6 @@
package org.elasticsearch.index.store.smb;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.plugin.store.smb.SMBStorePlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
@@ -16,7 +15,7 @@
import java.util.Arrays;
import java.util.Collection;
-import static org.hamcrest.Matchers.is;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
public abstract class AbstractAzureFsTestCase extends ESIntegTestCase {
@Override
@@ -32,7 +31,6 @@ public void testAzureFs() {
indexDoc("test", "" + i, "foo", "bar");
}
refresh();
- SearchResponse response = prepareSearch("test").get();
- assertThat(response.getHits().getTotalHits().value, is(nbDocs));
+ assertHitCount(prepareSearch("test"), nbDocs);
}
}
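The one-line replacement works because `assertHitCount` can take the builder directly and run the execute, assert, release cycle itself; roughly, under assumed names:

```java
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

import static org.junit.Assert.assertEquals;

// Rough sketch of a builder-accepting assertHitCount.
final class HitCountAssertions {
    static void assertHitCount(SearchRequestBuilder builder, long expectedHits) {
        SearchResponse response = builder.get();
        try {
            assertEquals(expectedHits, response.getHits().getTotalHits().value);
        } finally {
            response.decRef();
        }
    }
}
```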
diff --git a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java
index 1bb2116cc680a..63860c6355630 100644
--- a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java
+++ b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java
@@ -175,8 +175,12 @@ void verifySearch(String localIndex, int localNumDocs, String remoteIndex, int r
)
) {
SearchResponse searchResponse = SearchResponse.fromXContent(parser);
- ElasticsearchAssertions.assertNoFailures(searchResponse);
- ElasticsearchAssertions.assertHitCount(searchResponse, expectedDocs);
+ try {
+ ElasticsearchAssertions.assertNoFailures(searchResponse);
+ ElasticsearchAssertions.assertHitCount(searchResponse, expectedDocs);
+ } finally {
+ searchResponse.decRef();
+ }
}
} catch (IOException e) {
throw new UncheckedIOException(e);
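Here the `SearchResponse` is materialized locally from parsed XContent, so the test owns the reference and must release it explicitly; the added try/finally is the manual form of the same pattern. Generalized as a hypothetical utility:

```java
import java.util.function.Consumer;

import org.elasticsearch.core.RefCounted;

// Hypothetical utility: run assertions against any ref-counted value,
// then release the caller's reference exactly once.
final class RefCountedAssertions {
    static <T extends RefCounted> void withRelease(T value, Consumer<T> assertions) {
        try {
            assertions.accept(value);
        } finally {
            value.decRef();
        }
    }
}
```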
diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterHealthRestCancellationIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterHealthRestCancellationIT.java
index cf76d86c9298f..755bbce93c95b 100644
--- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterHealthRestCancellationIT.java
+++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterHealthRestCancellationIT.java
@@ -18,9 +18,11 @@
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
+import org.elasticsearch.test.junit.annotations.TestIssueLogging;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.TimeUnit;
import static org.elasticsearch.action.support.ActionTestUtils.wrapAsRestResponseListener;
import static org.elasticsearch.test.TaskAssertions.assertAllCancellableTasksAreCancelled;
@@ -28,6 +30,10 @@
public class ClusterHealthRestCancellationIT extends HttpSmokeTestCase {
+ @TestIssueLogging(
+ issueUrl = "https://github.com/elastic/elasticsearch/issues/100062",
+ value = "org.elasticsearch.test.TaskAssertions:TRACE"
+ )
public void testClusterHealthRestCancellation() throws Exception {
final var barrier = new CyclicBarrier(2);
@@ -37,7 +43,18 @@ public void testClusterHealthRestCancellation() throws Exception {
@Override
public ClusterState execute(ClusterState currentState) {
safeAwait(barrier);
- safeAwait(barrier);
+ // safeAwait(barrier);
+
+ // temporarily lengthen timeout on safeAwait while investigating #100062
+ try {
+ barrier.await(60, TimeUnit.SECONDS);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new AssertionError("unexpected", e);
+ } catch (Exception e) {
+ throw new AssertionError("unexpected", e);
+ }
+
return currentState;
}
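The inline block temporarily swaps `safeAwait` for a 60-second bounded await while https://github.com/elastic/elasticsearch/issues/100062 is investigated. Extracted as a helper it would look like this (hypothetical, mirroring the inline code exactly):

```java
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;

// Hypothetical extraction of the inline workaround above.
final class Barriers {
    static void awaitOrFail(CyclicBarrier barrier, long seconds) {
        try {
            barrier.await(seconds, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();        // preserve interrupt status before failing
            throw new AssertionError("unexpected", e);
        } catch (Exception e) {                        // BrokenBarrierException or TimeoutException
            throw new AssertionError("unexpected", e);
        }
    }
}
```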
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml
index 151698482368a..62d752b1efe88 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml
@@ -2,7 +2,34 @@ setup:
- skip:
version: ' - 8.10.99'
reason: 'Dynamic mapping of floats to dense_vector was added in 8.11'
+---
+"Fields indexed as strings won't be transformed into dense_vector":
+ - do:
+ index:
+ index: strings-are-not-floats
+ refresh: true
+ body:
+ obviously_string: ["foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo",
+ "foo", "foo", "foo", "foo", "foo", "foo", "foo", "foo"]
+ - do:
+ cluster.health:
+ wait_for_events: languid
+ - do:
+ indices.get_mapping:
+ index: strings-are-not-floats
+ - match: { strings-are-not-floats.mappings.properties.obviously_string.type: text }
---
"Fields with float arrays below the threshold still map as float":
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java
index 4b395ec6856e5..d19c61f97efd9 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java
@@ -206,7 +206,6 @@ public void testInvalidShardCountSettingsWithoutPrefix() throws Exception {
}
}
- @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/96578")
public void testCreateAndDeleteIndexConcurrently() throws InterruptedException {
createIndex("test");
final AtomicInteger indexVersion = new AtomicInteger(0);
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java
index 206b866bd4758..a12f7feb05b48 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java
@@ -377,7 +377,6 @@ private void assertEngineTypes() {
}
}
- @AwaitsFix(bugUrl = "ES-4677")
public void testRelocation() {
var routingTableWatcher = new RoutingTableWatcher();
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java
index 64a97bf0f6f16..295486fba2e56 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java
@@ -7,7 +7,6 @@
*/
package org.elasticsearch.search.aggregations.metrics;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
@@ -39,6 +38,7 @@
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@@ -91,310 +91,325 @@ private static double varianceSampling(int... vals) {
@Override
public void testEmptyAggregation() throws Exception {
- SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
- .addAggregation(
- histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value"))
- )
- .get();
-
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
- Histogram histo = searchResponse.getAggregations().get("histo");
- assertThat(histo, notNullValue());
- Histogram.Bucket bucket = histo.getBuckets().get(1);
- assertThat(bucket, notNullValue());
-
- ExtendedStats stats = bucket.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getSumOfSquares(), equalTo(0.0));
- assertThat(stats.getCount(), equalTo(0L));
- assertThat(stats.getSum(), equalTo(0.0));
- assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY));
- assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
- assertThat(Double.isNaN(stats.getStdDeviation()), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationPopulation()), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationSampling()), is(true));
- assertThat(Double.isNaN(stats.getAvg()), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_POPULATION)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_POPULATION)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_SAMPLING)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_SAMPLING)), is(true));
+ assertResponse(
+ prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
+ .addAggregation(
+ histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value"))
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+ Histogram histo = response.getAggregations().get("histo");
+ assertThat(histo, notNullValue());
+ Histogram.Bucket bucket = histo.getBuckets().get(1);
+ assertThat(bucket, notNullValue());
+
+ ExtendedStats stats = bucket.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getSumOfSquares(), equalTo(0.0));
+ assertThat(stats.getCount(), equalTo(0L));
+ assertThat(stats.getSum(), equalTo(0.0));
+ assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY));
+ assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
+ assertThat(Double.isNaN(stats.getStdDeviation()), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationPopulation()), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationSampling()), is(true));
+ assertThat(Double.isNaN(stats.getAvg()), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_POPULATION)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_POPULATION)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_SAMPLING)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_SAMPLING)), is(true));
+ }
+ );
}
@Override
public void testUnmapped() throws Exception {
- SearchResponse searchResponse = prepareSearch("idx_unmapped").setQuery(matchAllQuery())
- .addAggregation(extendedStats("stats").field("value"))
- .get();
-
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L));
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo(Double.NaN));
- assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY));
- assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
- assertThat(stats.getSum(), equalTo(0.0));
- assertThat(stats.getCount(), equalTo(0L));
- assertThat(stats.getSumOfSquares(), equalTo(0.0));
- assertThat(stats.getVariance(), equalTo(Double.NaN));
- assertThat(stats.getVariancePopulation(), equalTo(Double.NaN));
- assertThat(stats.getVarianceSampling(), equalTo(Double.NaN));
- assertThat(stats.getStdDeviation(), equalTo(Double.NaN));
- assertThat(stats.getStdDeviationPopulation(), equalTo(Double.NaN));
- assertThat(stats.getStdDeviationSampling(), equalTo(Double.NaN));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_POPULATION)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_POPULATION)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_SAMPLING)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_SAMPLING)), is(true));
+ assertResponse(
+ prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").field("value")),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(0L));
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getAvg(), equalTo(Double.NaN));
+ assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY));
+ assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
+ assertThat(stats.getSum(), equalTo(0.0));
+ assertThat(stats.getCount(), equalTo(0L));
+ assertThat(stats.getSumOfSquares(), equalTo(0.0));
+ assertThat(stats.getVariance(), equalTo(Double.NaN));
+ assertThat(stats.getVariancePopulation(), equalTo(Double.NaN));
+ assertThat(stats.getVarianceSampling(), equalTo(Double.NaN));
+ assertThat(stats.getStdDeviation(), equalTo(Double.NaN));
+ assertThat(stats.getStdDeviationPopulation(), equalTo(Double.NaN));
+ assertThat(stats.getStdDeviationSampling(), equalTo(Double.NaN));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_POPULATION)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_POPULATION)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_SAMPLING)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_SAMPLING)), is(true));
+ }
+ );
}
public void testPartiallyUnmapped() {
double sigma = randomDouble() * 5;
- ExtendedStats s1 = prepareSearch("idx").addAggregation(extendedStats("stats").field("value").sigma(sigma))
- .get()
- .getAggregations()
- .get("stats");
- ExtendedStats s2 = prepareSearch("idx", "idx_unmapped").addAggregation(extendedStats("stats").field("value").sigma(sigma))
- .get()
- .getAggregations()
- .get("stats");
- assertEquals(s1.getAvg(), s2.getAvg(), 1e-10);
- assertEquals(s1.getCount(), s2.getCount());
- assertEquals(s1.getMin(), s2.getMin(), 0d);
- assertEquals(s1.getMax(), s2.getMax(), 0d);
- assertEquals(s1.getStdDeviation(), s2.getStdDeviation(), 1e-10);
- assertEquals(s1.getStdDeviationPopulation(), s2.getStdDeviationPopulation(), 1e-10);
- assertEquals(s1.getStdDeviationSampling(), s2.getStdDeviationSampling(), 1e-10);
- assertEquals(s1.getSumOfSquares(), s2.getSumOfSquares(), 1e-10);
- assertEquals(s1.getStdDeviationBound(Bounds.LOWER), s2.getStdDeviationBound(Bounds.LOWER), 1e-10);
- assertEquals(s1.getStdDeviationBound(Bounds.UPPER), s2.getStdDeviationBound(Bounds.UPPER), 1e-10);
- assertEquals(s1.getStdDeviationBound(Bounds.LOWER_POPULATION), s2.getStdDeviationBound(Bounds.LOWER_POPULATION), 1e-10);
- assertEquals(s1.getStdDeviationBound(Bounds.UPPER_POPULATION), s2.getStdDeviationBound(Bounds.UPPER_POPULATION), 1e-10);
- assertEquals(s1.getStdDeviationBound(Bounds.LOWER_SAMPLING), s2.getStdDeviationBound(Bounds.LOWER_SAMPLING), 1e-10);
- assertEquals(s1.getStdDeviationBound(Bounds.UPPER_SAMPLING), s2.getStdDeviationBound(Bounds.UPPER_SAMPLING), 1e-10);
+ assertResponse(prepareSearch("idx").addAggregation(extendedStats("stats").field("value").sigma(sigma)), response1 -> {
+ ExtendedStats s1 = response1.getAggregations().get("stats");
+ assertResponse(
+ prepareSearch("idx", "idx_unmapped").addAggregation(extendedStats("stats").field("value").sigma(sigma)),
+ response2 -> {
+ ExtendedStats s2 = response2.getAggregations().get("stats");
+ assertEquals(s1.getAvg(), s2.getAvg(), 1e-10);
+ assertEquals(s1.getCount(), s2.getCount());
+ assertEquals(s1.getMin(), s2.getMin(), 0d);
+ assertEquals(s1.getMax(), s2.getMax(), 0d);
+ assertEquals(s1.getStdDeviation(), s2.getStdDeviation(), 1e-10);
+ assertEquals(s1.getStdDeviationPopulation(), s2.getStdDeviationPopulation(), 1e-10);
+ assertEquals(s1.getStdDeviationSampling(), s2.getStdDeviationSampling(), 1e-10);
+ assertEquals(s1.getSumOfSquares(), s2.getSumOfSquares(), 1e-10);
+ assertEquals(s1.getStdDeviationBound(Bounds.LOWER), s2.getStdDeviationBound(Bounds.LOWER), 1e-10);
+ assertEquals(s1.getStdDeviationBound(Bounds.UPPER), s2.getStdDeviationBound(Bounds.UPPER), 1e-10);
+ assertEquals(s1.getStdDeviationBound(Bounds.LOWER_POPULATION), s2.getStdDeviationBound(Bounds.LOWER_POPULATION), 1e-10);
+ assertEquals(s1.getStdDeviationBound(Bounds.UPPER_POPULATION), s2.getStdDeviationBound(Bounds.UPPER_POPULATION), 1e-10);
+ assertEquals(s1.getStdDeviationBound(Bounds.LOWER_SAMPLING), s2.getStdDeviationBound(Bounds.LOWER_SAMPLING), 1e-10);
+ assertEquals(s1.getStdDeviationBound(Bounds.UPPER_SAMPLING), s2.getStdDeviationBound(Bounds.UPPER_SAMPLING), 1e-10);
+ }
+ );
+ });
}
@Override
public void testSingleValuedField() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(extendedStats("stats").field("value").sigma(sigma))
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
- assertThat(stats.getMin(), equalTo(1.0));
- assertThat(stats.getMax(), equalTo(10.0));
- assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
- assertThat(stats.getCount(), equalTo(10L));
- assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
- assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- checkUpperLowerBounds(stats, sigma);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").field("value").sigma(sigma)),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
+ assertThat(stats.getMin(), equalTo(1.0));
+ assertThat(stats.getMax(), equalTo(10.0));
+ assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
+ assertThat(stats.getCount(), equalTo(10L));
+ assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
+ assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ checkUpperLowerBounds(stats, sigma);
+ }
+ );
}
public void testSingleValuedFieldDefaultSigma() throws Exception {
// Same as previous test, but uses a default value for sigma
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(extendedStats("stats").field("value"))
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
- assertThat(stats.getMin(), equalTo(1.0));
- assertThat(stats.getMax(), equalTo(10.0));
- assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
- assertThat(stats.getCount(), equalTo(10L));
- assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
- assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- checkUpperLowerBounds(stats, 2);
+ assertResponse(prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").field("value")), response -> {
+
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
+ assertThat(stats.getMin(), equalTo(1.0));
+ assertThat(stats.getMax(), equalTo(10.0));
+ assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
+ assertThat(stats.getCount(), equalTo(10L));
+ assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
+ assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ checkUpperLowerBounds(stats, 2);
+ });
}
public void testSingleValuedField_WithFormatter() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(extendedStats("stats").format("0000.0").field("value").sigma(sigma))
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
- assertThat(stats.getAvgAsString(), equalTo("0005.5"));
- assertThat(stats.getMin(), equalTo(1.0));
- assertThat(stats.getMinAsString(), equalTo("0001.0"));
- assertThat(stats.getMax(), equalTo(10.0));
- assertThat(stats.getMaxAsString(), equalTo("0010.0"));
- assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
- assertThat(stats.getSumAsString(), equalTo("0055.0"));
- assertThat(stats.getCount(), equalTo(10L));
- assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
- assertThat(stats.getSumOfSquaresAsString(), equalTo("0385.0"));
- assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVarianceAsString(), equalTo("0008.2"));
- assertThat(stats.getVariancePopulationAsString(), equalTo("0008.2"));
- assertThat(stats.getVarianceSamplingAsString(), equalTo("0009.2"));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationAsString(), equalTo("0002.9"));
- assertThat(stats.getStdDeviationPopulationAsString(), equalTo("0002.9"));
- assertThat(stats.getStdDeviationSamplingAsString(), equalTo("0003.0"));
- checkUpperLowerBounds(stats, sigma);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(extendedStats("stats").format("0000.0").field("value").sigma(sigma)),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
+ assertThat(stats.getAvgAsString(), equalTo("0005.5"));
+ assertThat(stats.getMin(), equalTo(1.0));
+ assertThat(stats.getMinAsString(), equalTo("0001.0"));
+ assertThat(stats.getMax(), equalTo(10.0));
+ assertThat(stats.getMaxAsString(), equalTo("0010.0"));
+ assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
+ assertThat(stats.getSumAsString(), equalTo("0055.0"));
+ assertThat(stats.getCount(), equalTo(10L));
+ assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
+ assertThat(stats.getSumOfSquaresAsString(), equalTo("0385.0"));
+ assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVarianceAsString(), equalTo("0008.2"));
+ assertThat(stats.getVariancePopulationAsString(), equalTo("0008.2"));
+ assertThat(stats.getVarianceSamplingAsString(), equalTo("0009.2"));
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationAsString(), equalTo("0002.9"));
+ assertThat(stats.getStdDeviationPopulationAsString(), equalTo("0002.9"));
+ assertThat(stats.getStdDeviationSamplingAsString(), equalTo("0003.0"));
+ checkUpperLowerBounds(stats, sigma);
+ }
+ );
}
@Override
public void testSingleValuedFieldGetProperty() throws Exception {
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(global("global").subAggregation(extendedStats("stats").field("value")))
- .get();
-
- assertHitCount(searchResponse, 10);
-
- Global global = searchResponse.getAggregations().get("global");
- assertThat(global, notNullValue());
- assertThat(global.getName(), equalTo("global"));
- assertThat(global.getDocCount(), equalTo(10L));
- assertThat(global.getAggregations(), notNullValue());
- assertThat(global.getAggregations().asMap().size(), equalTo(1));
-
- ExtendedStats stats = global.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- ExtendedStats statsFromProperty = (ExtendedStats) ((InternalAggregation) global).getProperty("stats");
- assertThat(statsFromProperty, notNullValue());
- assertThat(statsFromProperty, sameInstance(stats));
- double expectedAvgValue = (double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10;
- assertThat(stats.getAvg(), equalTo(expectedAvgValue));
- assertThat((double) ((InternalAggregation) global).getProperty("stats.avg"), equalTo(expectedAvgValue));
- double expectedMinValue = 1.0;
- assertThat(stats.getMin(), equalTo(expectedMinValue));
- assertThat((double) ((InternalAggregation) global).getProperty("stats.min"), equalTo(expectedMinValue));
- double expectedMaxValue = 10.0;
- assertThat(stats.getMax(), equalTo(expectedMaxValue));
- assertThat((double) ((InternalAggregation) global).getProperty("stats.max"), equalTo(expectedMaxValue));
- double expectedSumValue = 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10;
- assertThat(stats.getSum(), equalTo(expectedSumValue));
- assertThat((double) ((InternalAggregation) global).getProperty("stats.sum"), equalTo(expectedSumValue));
- long expectedCountValue = 10;
- assertThat(stats.getCount(), equalTo(expectedCountValue));
- assertThat((double) ((InternalAggregation) global).getProperty("stats.count"), equalTo((double) expectedCountValue));
- double expectedSumOfSquaresValue = (double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100;
- assertThat(stats.getSumOfSquares(), equalTo(expectedSumOfSquaresValue));
- assertThat((double) ((InternalAggregation) global).getProperty("stats.sum_of_squares"), equalTo(expectedSumOfSquaresValue));
- double expectedVarianceValue = variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
- assertThat(stats.getVariance(), equalTo(expectedVarianceValue));
- assertThat((double) ((InternalAggregation) global).getProperty("stats.variance"), equalTo(expectedVarianceValue));
- double expectedVariancePopulationValue = variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
- assertThat(stats.getVariancePopulation(), equalTo(expectedVariancePopulationValue));
- assertThat(
- (double) ((InternalAggregation) global).getProperty("stats.variance_population"),
- equalTo(expectedVariancePopulationValue)
- );
- double expectedVarianceSamplingValue = varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
- assertThat(stats.getVarianceSampling(), equalTo(expectedVarianceSamplingValue));
- assertThat((double) ((InternalAggregation) global).getProperty("stats.variance_sampling"), equalTo(expectedVarianceSamplingValue));
- double expectedStdDevValue = stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
- assertThat(stats.getStdDeviation(), equalTo(expectedStdDevValue));
- assertThat((double) ((InternalAggregation) global).getProperty("stats.std_deviation"), equalTo(expectedStdDevValue));
- double expectedStdDevPopulationValue = stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
- assertThat(stats.getStdDeviationPopulation(), equalTo(expectedStdDevValue));
- assertThat(
- (double) ((InternalAggregation) global).getProperty("stats.std_deviation_population"),
- equalTo(expectedStdDevPopulationValue)
- );
- double expectedStdDevSamplingValue = stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
- assertThat(stats.getStdDeviationSampling(), equalTo(expectedStdDevSamplingValue));
- assertThat(
- (double) ((InternalAggregation) global).getProperty("stats.std_deviation_sampling"),
- equalTo(expectedStdDevSamplingValue)
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(global("global").subAggregation(extendedStats("stats").field("value"))),
+ response -> {
+ assertHitCount(response, 10);
+
+ Global global = response.getAggregations().get("global");
+ assertThat(global, notNullValue());
+ assertThat(global.getName(), equalTo("global"));
+ assertThat(global.getDocCount(), equalTo(10L));
+ assertThat(global.getAggregations(), notNullValue());
+ assertThat(global.getAggregations().asMap().size(), equalTo(1));
+
+ ExtendedStats stats = global.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ ExtendedStats statsFromProperty = (ExtendedStats) ((InternalAggregation) global).getProperty("stats");
+ assertThat(statsFromProperty, notNullValue());
+ assertThat(statsFromProperty, sameInstance(stats));
+ double expectedAvgValue = (double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10;
+ assertThat(stats.getAvg(), equalTo(expectedAvgValue));
+ assertThat((double) ((InternalAggregation) global).getProperty("stats.avg"), equalTo(expectedAvgValue));
+ double expectedMinValue = 1.0;
+ assertThat(stats.getMin(), equalTo(expectedMinValue));
+ assertThat((double) ((InternalAggregation) global).getProperty("stats.min"), equalTo(expectedMinValue));
+ double expectedMaxValue = 10.0;
+ assertThat(stats.getMax(), equalTo(expectedMaxValue));
+ assertThat((double) ((InternalAggregation) global).getProperty("stats.max"), equalTo(expectedMaxValue));
+ double expectedSumValue = 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10;
+ assertThat(stats.getSum(), equalTo(expectedSumValue));
+ assertThat((double) ((InternalAggregation) global).getProperty("stats.sum"), equalTo(expectedSumValue));
+ long expectedCountValue = 10;
+ assertThat(stats.getCount(), equalTo(expectedCountValue));
+ assertThat((double) ((InternalAggregation) global).getProperty("stats.count"), equalTo((double) expectedCountValue));
+ double expectedSumOfSquaresValue = (double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100;
+ assertThat(stats.getSumOfSquares(), equalTo(expectedSumOfSquaresValue));
+ assertThat((double) ((InternalAggregation) global).getProperty("stats.sum_of_squares"), equalTo(expectedSumOfSquaresValue));
+ double expectedVarianceValue = variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+ assertThat(stats.getVariance(), equalTo(expectedVarianceValue));
+ assertThat((double) ((InternalAggregation) global).getProperty("stats.variance"), equalTo(expectedVarianceValue));
+ double expectedVariancePopulationValue = variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+ assertThat(stats.getVariancePopulation(), equalTo(expectedVariancePopulationValue));
+ assertThat(
+ (double) ((InternalAggregation) global).getProperty("stats.variance_population"),
+ equalTo(expectedVariancePopulationValue)
+ );
+ double expectedVarianceSamplingValue = varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+ assertThat(stats.getVarianceSampling(), equalTo(expectedVarianceSamplingValue));
+ assertThat(
+ (double) ((InternalAggregation) global).getProperty("stats.variance_sampling"),
+ equalTo(expectedVarianceSamplingValue)
+ );
+ double expectedStdDevValue = stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+ assertThat(stats.getStdDeviation(), equalTo(expectedStdDevValue));
+ assertThat((double) ((InternalAggregation) global).getProperty("stats.std_deviation"), equalTo(expectedStdDevValue));
+ double expectedStdDevPopulationValue = stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+                assertThat(stats.getStdDeviationPopulation(), equalTo(expectedStdDevPopulationValue));
+ assertThat(
+ (double) ((InternalAggregation) global).getProperty("stats.std_deviation_population"),
+ equalTo(expectedStdDevPopulationValue)
+ );
+ double expectedStdDevSamplingValue = stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+ assertThat(stats.getStdDeviationSampling(), equalTo(expectedStdDevSamplingValue));
+ assertThat(
+ (double) ((InternalAggregation) global).getProperty("stats.std_deviation_sampling"),
+ equalTo(expectedStdDevSamplingValue)
+ );
+ }
);
}
@Override
public void testSingleValuedFieldPartiallyUnmapped() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery())
- .addAggregation(extendedStats("stats").field("value").sigma(sigma))
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
- assertThat(stats.getMin(), equalTo(1.0));
- assertThat(stats.getMax(), equalTo(10.0));
- assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
- assertThat(stats.getCount(), equalTo(10L));
- assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
- assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- checkUpperLowerBounds(stats, sigma);
+ assertResponse(
+ prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery())
+ .addAggregation(extendedStats("stats").field("value").sigma(sigma)),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
+ assertThat(stats.getMin(), equalTo(1.0));
+ assertThat(stats.getMax(), equalTo(10.0));
+ assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
+ assertThat(stats.getCount(), equalTo(10L));
+ assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
+ assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ checkUpperLowerBounds(stats, sigma);
+ }
+ );
}
@Override
public void testSingleValuedFieldWithValueScript() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- extendedStats("stats").field("value")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))
- .sigma(sigma)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10));
- assertThat(stats.getMin(), equalTo(2.0));
- assertThat(stats.getMax(), equalTo(11.0));
- assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11));
- assertThat(stats.getCount(), equalTo(10L));
- assertThat(stats.getSumOfSquares(), equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121));
- assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- checkUpperLowerBounds(stats, sigma);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ extendedStats("stats").field("value")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))
+ .sigma(sigma)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getAvg(), equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10));
+ assertThat(stats.getMin(), equalTo(2.0));
+ assertThat(stats.getMax(), equalTo(11.0));
+ assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11));
+ assertThat(stats.getCount(), equalTo(10L));
+ assertThat(stats.getSumOfSquares(), equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121));
+ assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ checkUpperLowerBounds(stats, sigma);
+ }
+ );
}
@Override
@@ -402,118 +417,139 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
params.put("inc", 1);
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- extendedStats("stats").field("value")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))
- .sigma(sigma)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10));
- assertThat(stats.getMin(), equalTo(2.0));
- assertThat(stats.getMax(), equalTo(11.0));
- assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11));
- assertThat(stats.getCount(), equalTo(10L));
- assertThat(stats.getSumOfSquares(), equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121));
- assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- checkUpperLowerBounds(stats, sigma);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ extendedStats("stats").field("value")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))
+ .sigma(sigma)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getAvg(), equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10));
+ assertThat(stats.getMin(), equalTo(2.0));
+ assertThat(stats.getMax(), equalTo(11.0));
+ assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11));
+ assertThat(stats.getCount(), equalTo(10L));
+ assertThat(stats.getSumOfSquares(), equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121));
+ assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ checkUpperLowerBounds(stats, sigma);
+ }
+ );
}
@Override
public void testMultiValuedField() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(extendedStats("stats").field("values").sigma(sigma))
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(
- stats.getAvg(),
- equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12) / 20)
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").field("values").sigma(sigma)),
+ response -> {
+
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(
+ stats.getAvg(),
+ equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12) / 20)
+ );
+ assertThat(stats.getMin(), equalTo(2.0));
+ assertThat(stats.getMax(), equalTo(12.0));
+ assertThat(
+ stats.getSum(),
+ equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12)
+ );
+ assertThat(stats.getCount(), equalTo(20L));
+ assertThat(
+ stats.getSumOfSquares(),
+ equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 144)
+ );
+ assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
+ assertThat(
+ stats.getVariancePopulation(),
+ equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ );
+ assertThat(
+ stats.getVarianceSampling(),
+ equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ );
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
+ assertThat(
+ stats.getStdDeviationPopulation(),
+ equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ );
+ assertThat(
+ stats.getStdDeviationSampling(),
+ equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ );
+ checkUpperLowerBounds(stats, sigma);
+ }
);
- assertThat(stats.getMin(), equalTo(2.0));
- assertThat(stats.getMax(), equalTo(12.0));
- assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12));
- assertThat(stats.getCount(), equalTo(20L));
- assertThat(
- stats.getSumOfSquares(),
- equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 144)
- );
- assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
- assertThat(
- stats.getVariancePopulation(),
- equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
- );
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
- assertThat(
- stats.getStdDeviationPopulation(),
- equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
- );
- assertThat(
- stats.getStdDeviationSampling(),
- equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
- );
- checkUpperLowerBounds(stats, sigma);
}
@Override
public void testMultiValuedFieldWithValueScript() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- extendedStats("stats").field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", Collections.emptyMap()))
- .sigma(sigma)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(
- stats.getAvg(),
- equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 20)
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ extendedStats("stats").field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", Collections.emptyMap()))
+ .sigma(sigma)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(
+ stats.getAvg(),
+ equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 20)
+ );
+ assertThat(stats.getMin(), equalTo(1.0));
+ assertThat(stats.getMax(), equalTo(11.0));
+ assertThat(
+ stats.getSum(),
+ equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11)
+ );
+ assertThat(stats.getCount(), equalTo(20L));
+ assertThat(
+ stats.getSumOfSquares(),
+ equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121)
+ );
+ assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(
+ stats.getVariancePopulation(),
+ equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+ );
+ assertThat(
+ stats.getVarianceSampling(),
+ equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+ );
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(
+ stats.getStdDeviationPopulation(),
+ equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+ );
+ assertThat(
+ stats.getStdDeviationSampling(),
+ equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+ );
+
+ checkUpperLowerBounds(stats, sigma);
+ }
);
- assertThat(stats.getMin(), equalTo(1.0));
- assertThat(stats.getMax(), equalTo(11.0));
- assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11));
- assertThat(stats.getCount(), equalTo(20L));
- assertThat(
- stats.getSumOfSquares(),
- equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121)
- );
- assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(
- stats.getVariancePopulation(),
- equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
- );
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(
- stats.getStdDeviationPopulation(),
- equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
- );
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
-
- checkUpperLowerBounds(stats, sigma);
}
@Override
@@ -521,75 +557,88 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
params.put("dec", 1);
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- extendedStats("stats").field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
- .sigma(sigma)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(
- stats.getAvg(),
- equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 20)
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ extendedStats("stats").field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
+ .sigma(sigma)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(
+ stats.getAvg(),
+ equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 20)
+ );
+ assertThat(stats.getMin(), equalTo(1.0));
+ assertThat(stats.getMax(), equalTo(11.0));
+ assertThat(
+ stats.getSum(),
+ equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11)
+ );
+ assertThat(stats.getCount(), equalTo(20L));
+ assertThat(
+ stats.getSumOfSquares(),
+ equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121)
+ );
+ assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(
+ stats.getVariancePopulation(),
+ equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+ );
+ assertThat(
+ stats.getVarianceSampling(),
+ equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+ );
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(
+ stats.getStdDeviationPopulation(),
+ equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+ );
+ assertThat(
+ stats.getStdDeviationSampling(),
+ equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+ );
+ checkUpperLowerBounds(stats, sigma);
+ }
);
- assertThat(stats.getMin(), equalTo(1.0));
- assertThat(stats.getMax(), equalTo(11.0));
- assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11));
- assertThat(stats.getCount(), equalTo(20L));
- assertThat(
- stats.getSumOfSquares(),
- equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121)
- );
- assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(
- stats.getVariancePopulation(),
- equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
- );
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(
- stats.getStdDeviationPopulation(),
- equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
- );
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- checkUpperLowerBounds(stats, sigma);
}
@Override
public void testScriptSingleValued() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- extendedStats("stats").script(
- new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap())
- ).sigma(sigma)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
- assertThat(stats.getMin(), equalTo(1.0));
- assertThat(stats.getMax(), equalTo(10.0));
- assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
- assertThat(stats.getCount(), equalTo(10L));
- assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
- assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
- checkUpperLowerBounds(stats, sigma);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ extendedStats("stats").script(
+ new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap())
+ ).sigma(sigma)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10));
+ assertThat(stats.getMin(), equalTo(1.0));
+ assertThat(stats.getMax(), equalTo(10.0));
+ assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
+ assertThat(stats.getCount(), equalTo(10L));
+ assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
+ assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
+ checkUpperLowerBounds(stats, sigma);
+ }
+ );
}
@Override
@@ -600,74 +649,83 @@ public void testScriptSingleValuedWithParams() throws Exception {
Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value + inc", params);
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(extendedStats("stats").script(script).sigma(sigma))
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10));
- assertThat(stats.getMin(), equalTo(2.0));
- assertThat(stats.getMax(), equalTo(11.0));
- assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11));
- assertThat(stats.getCount(), equalTo(10L));
- assertThat(stats.getSumOfSquares(), equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121));
- assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
- checkUpperLowerBounds(stats, sigma);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").script(script).sigma(sigma)),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getAvg(), equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10));
+ assertThat(stats.getMin(), equalTo(2.0));
+ assertThat(stats.getMax(), equalTo(11.0));
+ assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11));
+ assertThat(stats.getCount(), equalTo(10L));
+ assertThat(stats.getSumOfSquares(), equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121));
+ assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11)));
+ checkUpperLowerBounds(stats, sigma);
+ }
+ );
}
@Override
public void testScriptMultiValued() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- extendedStats("stats").script(
- new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", Collections.emptyMap())
- ).sigma(sigma)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(
- stats.getAvg(),
- equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12) / 20)
- );
- assertThat(stats.getMin(), equalTo(2.0));
- assertThat(stats.getMax(), equalTo(12.0));
- assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12));
- assertThat(stats.getCount(), equalTo(20L));
- assertThat(
- stats.getSumOfSquares(),
- equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 144)
- );
- assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
- assertThat(
- stats.getVariancePopulation(),
- equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
- );
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
- assertThat(
- stats.getStdDeviationPopulation(),
- equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
- );
- assertThat(
- stats.getStdDeviationSampling(),
- equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ extendedStats("stats").script(
+ new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", Collections.emptyMap())
+ ).sigma(sigma)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(
+ stats.getAvg(),
+ equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12) / 20)
+ );
+ assertThat(stats.getMin(), equalTo(2.0));
+ assertThat(stats.getMax(), equalTo(12.0));
+ assertThat(
+ stats.getSum(),
+ equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12)
+ );
+ assertThat(stats.getCount(), equalTo(20L));
+ assertThat(
+ stats.getSumOfSquares(),
+ equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 144)
+ );
+ assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
+ assertThat(
+ stats.getVariancePopulation(),
+ equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ );
+ assertThat(
+ stats.getVarianceSampling(),
+ equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ );
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)));
+ assertThat(
+ stats.getStdDeviationPopulation(),
+ equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ );
+ assertThat(
+ stats.getStdDeviationSampling(),
+ equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ );
+ checkUpperLowerBounds(stats, sigma);
+ }
);
- checkUpperLowerBounds(stats, sigma);
}
@Override
@@ -683,125 +741,147 @@ public void testScriptMultiValuedWithParams() throws Exception {
);
double sigma = randomDouble() * randomIntBetween(1, 10);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(extendedStats("stats").script(script).sigma(sigma))
- .get();
-
- assertHitCount(searchResponse, 10);
-
- ExtendedStats stats = searchResponse.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9) / 20));
- assertThat(stats.getMin(), equalTo(0.0));
- assertThat(stats.getMax(), equalTo(10.0));
- assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9));
- assertThat(stats.getCount(), equalTo(20L));
- assertThat(
- stats.getSumOfSquares(),
- equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 0 + 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81)
- );
- assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)));
- assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)));
- assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)));
- assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)));
- assertThat(
- stats.getStdDeviationPopulation(),
- equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").script(script).sigma(sigma)),
+ response -> {
+ assertHitCount(response, 10);
+
+ ExtendedStats stats = response.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(
+ stats.getAvg(),
+ equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9) / 20)
+ );
+ assertThat(stats.getMin(), equalTo(0.0));
+ assertThat(stats.getMax(), equalTo(10.0));
+ assertThat(
+ stats.getSum(),
+ equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9)
+ );
+ assertThat(stats.getCount(), equalTo(20L));
+ assertThat(
+ stats.getSumOfSquares(),
+ equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 0 + 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81)
+ );
+ assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)));
+ assertThat(
+ stats.getVariancePopulation(),
+ equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
+ );
+ assertThat(
+ stats.getVarianceSampling(),
+ equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
+ );
+ assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)));
+ assertThat(
+ stats.getStdDeviationPopulation(),
+ equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
+ );
+ assertThat(
+ stats.getStdDeviationSampling(),
+ equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
+ );
+ checkUpperLowerBounds(stats, sigma);
+ }
);
- assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)));
- checkUpperLowerBounds(stats, sigma);
}
public void testEmptySubAggregation() {
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- terms("value").field("value")
- .subAggregation(missing("values").field("values").subAggregation(extendedStats("stats").field("value")))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- Terms terms = searchResponse.getAggregations().get("value");
- assertThat(terms, notNullValue());
- assertThat(terms.getBuckets().size(), equalTo(10));
-
- for (Terms.Bucket bucket : terms.getBuckets()) {
- assertThat(bucket.getDocCount(), equalTo(1L));
-
- Missing missing = bucket.getAggregations().get("values");
- assertThat(missing, notNullValue());
- assertThat(missing.getDocCount(), equalTo(0L));
-
- ExtendedStats stats = missing.getAggregations().get("stats");
- assertThat(stats, notNullValue());
- assertThat(stats.getName(), equalTo("stats"));
- assertThat(stats.getSumOfSquares(), equalTo(0.0));
- assertThat(stats.getCount(), equalTo(0L));
- assertThat(stats.getSum(), equalTo(0.0));
- assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY));
- assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
- assertThat(Double.isNaN(stats.getStdDeviation()), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationPopulation()), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationSampling()), is(true));
- assertThat(Double.isNaN(stats.getAvg()), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_POPULATION)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_POPULATION)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_SAMPLING)), is(true));
- assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_SAMPLING)), is(true));
- }
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ terms("value").field("value")
+ .subAggregation(missing("values").field("values").subAggregation(extendedStats("stats").field("value")))
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ Terms terms = response.getAggregations().get("value");
+ assertThat(terms, notNullValue());
+ assertThat(terms.getBuckets().size(), equalTo(10));
+
+ for (Terms.Bucket bucket : terms.getBuckets()) {
+ assertThat(bucket.getDocCount(), equalTo(1L));
+
+ Missing missing = bucket.getAggregations().get("values");
+ assertThat(missing, notNullValue());
+ assertThat(missing.getDocCount(), equalTo(0L));
+
+ ExtendedStats stats = missing.getAggregations().get("stats");
+ assertThat(stats, notNullValue());
+ assertThat(stats.getName(), equalTo("stats"));
+ assertThat(stats.getSumOfSquares(), equalTo(0.0));
+ assertThat(stats.getCount(), equalTo(0L));
+ assertThat(stats.getSum(), equalTo(0.0));
+ assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY));
+ assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
+ assertThat(Double.isNaN(stats.getStdDeviation()), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationPopulation()), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationSampling()), is(true));
+ assertThat(Double.isNaN(stats.getAvg()), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_POPULATION)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_POPULATION)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_SAMPLING)), is(true));
+ assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_SAMPLING)), is(true));
+ }
+ }
+ );
}
@Override
public void testOrderByEmptyAggregation() throws Exception {
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- terms("terms").field("value")
- .order(BucketOrder.compound(BucketOrder.aggregation("filter>extendedStats.avg", true)))
- .subAggregation(filter("filter", termQuery("value", 100)).subAggregation(extendedStats("extendedStats").field("value")))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- Terms terms = searchResponse.getAggregations().get("terms");
- assertThat(terms, notNullValue());
-        List<? extends Terms.Bucket> buckets = terms.getBuckets();
- assertThat(buckets, notNullValue());
- assertThat(buckets.size(), equalTo(10));
-
- for (int i = 0; i < 10; i++) {
- Terms.Bucket bucket = buckets.get(i);
- assertThat(bucket, notNullValue());
- assertThat(bucket.getKeyAsNumber(), equalTo((long) i + 1));
- assertThat(bucket.getDocCount(), equalTo(1L));
- Filter filter = bucket.getAggregations().get("filter");
- assertThat(filter, notNullValue());
- assertThat(filter.getDocCount(), equalTo(0L));
- ExtendedStats extendedStats = filter.getAggregations().get("extendedStats");
- assertThat(extendedStats, notNullValue());
- assertThat(extendedStats.getMin(), equalTo(Double.POSITIVE_INFINITY));
- assertThat(extendedStats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
- assertThat(extendedStats.getAvg(), equalTo(Double.NaN));
- assertThat(extendedStats.getSum(), equalTo(0.0));
- assertThat(extendedStats.getCount(), equalTo(0L));
- assertThat(extendedStats.getStdDeviation(), equalTo(Double.NaN));
- assertThat(extendedStats.getStdDeviationPopulation(), equalTo(Double.NaN));
- assertThat(extendedStats.getStdDeviationSampling(), equalTo(Double.NaN));
- assertThat(extendedStats.getSumOfSquares(), equalTo(0.0));
- assertThat(extendedStats.getVariance(), equalTo(Double.NaN));
- assertThat(extendedStats.getVariancePopulation(), equalTo(Double.NaN));
- assertThat(extendedStats.getVarianceSampling(), equalTo(Double.NaN));
- assertThat(extendedStats.getStdDeviationBound(Bounds.LOWER), equalTo(Double.NaN));
- assertThat(extendedStats.getStdDeviationBound(Bounds.UPPER), equalTo(Double.NaN));
- assertThat(extendedStats.getStdDeviationBound(Bounds.LOWER_POPULATION), equalTo(Double.NaN));
- assertThat(extendedStats.getStdDeviationBound(Bounds.UPPER_POPULATION), equalTo(Double.NaN));
- assertThat(extendedStats.getStdDeviationBound(Bounds.LOWER_SAMPLING), equalTo(Double.NaN));
- assertThat(extendedStats.getStdDeviationBound(Bounds.UPPER_SAMPLING), equalTo(Double.NaN));
- }
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ terms("terms").field("value")
+ .order(BucketOrder.compound(BucketOrder.aggregation("filter>extendedStats.avg", true)))
+ .subAggregation(
+ filter("filter", termQuery("value", 100)).subAggregation(extendedStats("extendedStats").field("value"))
+ )
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ Terms terms = response.getAggregations().get("terms");
+ assertThat(terms, notNullValue());
+                List<? extends Terms.Bucket> buckets = terms.getBuckets();
+ assertThat(buckets, notNullValue());
+ assertThat(buckets.size(), equalTo(10));
+
+ for (int i = 0; i < 10; i++) {
+ Terms.Bucket bucket = buckets.get(i);
+ assertThat(bucket, notNullValue());
+ assertThat(bucket.getKeyAsNumber(), equalTo((long) i + 1));
+ assertThat(bucket.getDocCount(), equalTo(1L));
+ Filter filter = bucket.getAggregations().get("filter");
+ assertThat(filter, notNullValue());
+ assertThat(filter.getDocCount(), equalTo(0L));
+ ExtendedStats extendedStats = filter.getAggregations().get("extendedStats");
+ assertThat(extendedStats, notNullValue());
+ assertThat(extendedStats.getMin(), equalTo(Double.POSITIVE_INFINITY));
+ assertThat(extendedStats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
+ assertThat(extendedStats.getAvg(), equalTo(Double.NaN));
+ assertThat(extendedStats.getSum(), equalTo(0.0));
+ assertThat(extendedStats.getCount(), equalTo(0L));
+ assertThat(extendedStats.getStdDeviation(), equalTo(Double.NaN));
+ assertThat(extendedStats.getStdDeviationPopulation(), equalTo(Double.NaN));
+ assertThat(extendedStats.getStdDeviationSampling(), equalTo(Double.NaN));
+ assertThat(extendedStats.getSumOfSquares(), equalTo(0.0));
+ assertThat(extendedStats.getVariance(), equalTo(Double.NaN));
+ assertThat(extendedStats.getVariancePopulation(), equalTo(Double.NaN));
+ assertThat(extendedStats.getVarianceSampling(), equalTo(Double.NaN));
+ assertThat(extendedStats.getStdDeviationBound(Bounds.LOWER), equalTo(Double.NaN));
+ assertThat(extendedStats.getStdDeviationBound(Bounds.UPPER), equalTo(Double.NaN));
+ assertThat(extendedStats.getStdDeviationBound(Bounds.LOWER_POPULATION), equalTo(Double.NaN));
+ assertThat(extendedStats.getStdDeviationBound(Bounds.UPPER_POPULATION), equalTo(Double.NaN));
+ assertThat(extendedStats.getStdDeviationBound(Bounds.LOWER_SAMPLING), equalTo(Double.NaN));
+ assertThat(extendedStats.getStdDeviationBound(Bounds.UPPER_SAMPLING), equalTo(Double.NaN));
+ }
+ }
+ );
}
private void checkUpperLowerBounds(ExtendedStats stats, double sigma) {
@@ -845,13 +925,13 @@ public void testScriptCaching() throws Exception {
);
// Test that a request using a nondeterministic script does not get cached
- SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(
- extendedStats("foo").field("d")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", Collections.emptyMap()))
- )
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(
+ extendedStats("foo").field("d")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", Collections.emptyMap()))
+ )
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -863,13 +943,13 @@ public void testScriptCaching() throws Exception {
);
// Test that a request using a deterministic script gets cached
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(
- extendedStats("foo").field("d")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))
- )
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(
+ extendedStats("foo").field("d")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))
+ )
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -881,8 +961,7 @@ public void testScriptCaching() throws Exception {
);
// Ensure that non-scripted requests are cached as normal
- r = prepareSearch("cache_test_idx").setSize(0).addAggregation(extendedStats("foo").field("d")).get();
- assertNoFailures(r);
+ assertNoFailures(prepareSearch("cache_test_idx").setSize(0).addAggregation(extendedStats("foo").field("d")));
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -893,5 +972,4 @@ public void testScriptCaching() throws Exception {
equalTo(2L)
);
}
-
}
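
The ExtendedStatsIT hunks above all follow one shape: a trailing .get() plus standalone assertions becomes a single assertResponse(request, response -> { ... }) call. The point of the helper is lifecycle management, not new assertions: the response is handed to the lambda and is guaranteed to be released even when an assertion throws. A minimal sketch of that contract follows; the RefCounted/RequestBuilder stand-ins are hypothetical, and the real helper in ElasticsearchAssertions differs in its generics and failure reporting.

import java.util.function.Consumer;

// Illustrative sketch only, not the upstream implementation.
final class ResponseAssertionSketch {

    interface RefCounted {
        void decRef(); // release pooled buffers backing the response
    }

    interface RequestBuilder<R extends RefCounted> {
        R get(); // execute the request and block for its response
    }

    // Run the request, hand the response to the assertion body, and always
    // release it, so a failing assertion can no longer leak a live response.
    static <R extends RefCounted> void assertResponse(RequestBuilder<R> request, Consumer<R> body) {
        R response = request.get();
        try {
            body.accept(response);
        } finally {
            response.decRef();
        }
    }
}

Written this way, each test supplies only the assertion lambda, which is why every hunk above moves the former post-.get() assertions inside a response -> { ... } block.
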
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
index 3aebbce43e1e1..f8b633dca1a10 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
@@ -8,13 +8,12 @@
package org.elasticsearch.search.aggregations.metrics;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.SpatialPoint;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.geo.RandomGeoGenerator;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
@@ -26,45 +25,42 @@
public class GeoBoundsIT extends SpatialBoundsAggregationTestBase {
public void testSingleValuedFieldNearDateLine() {
- SearchResponse response = prepareSearch(DATELINE_IDX_NAME).addAggregation(
- boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)
- ).get();
-
- assertNoFailures(response);
-
- GeoPoint geoValuesTopLeft = new GeoPoint(38, -179);
- GeoPoint geoValuesBottomRight = new GeoPoint(-24, 178);
-
- GeoBounds geoBounds = response.getAggregations().get(aggName());
- assertThat(geoBounds, notNullValue());
- assertThat(geoBounds.getName(), equalTo(aggName()));
- GeoPoint topLeft = geoBounds.topLeft();
- GeoPoint bottomRight = geoBounds.bottomRight();
- assertThat(topLeft.getY(), closeTo(geoValuesTopLeft.getY(), GEOHASH_TOLERANCE));
- assertThat(topLeft.getX(), closeTo(geoValuesTopLeft.getX(), GEOHASH_TOLERANCE));
- assertThat(bottomRight.getY(), closeTo(geoValuesBottomRight.getY(), GEOHASH_TOLERANCE));
- assertThat(bottomRight.getX(), closeTo(geoValuesBottomRight.getX(), GEOHASH_TOLERANCE));
+ assertNoFailuresAndResponse(
+ prepareSearch(DATELINE_IDX_NAME).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)),
+ response -> {
+ GeoPoint geoValuesTopLeft = new GeoPoint(38, -179);
+ GeoPoint geoValuesBottomRight = new GeoPoint(-24, 178);
+
+ GeoBounds geoBounds = response.getAggregations().get(aggName());
+ assertThat(geoBounds, notNullValue());
+ assertThat(geoBounds.getName(), equalTo(aggName()));
+ GeoPoint topLeft = geoBounds.topLeft();
+ GeoPoint bottomRight = geoBounds.bottomRight();
+ assertThat(topLeft.getY(), closeTo(geoValuesTopLeft.getY(), GEOHASH_TOLERANCE));
+ assertThat(topLeft.getX(), closeTo(geoValuesTopLeft.getX(), GEOHASH_TOLERANCE));
+ assertThat(bottomRight.getY(), closeTo(geoValuesBottomRight.getY(), GEOHASH_TOLERANCE));
+ assertThat(bottomRight.getX(), closeTo(geoValuesBottomRight.getX(), GEOHASH_TOLERANCE));
+ }
+ );
}
public void testSingleValuedFieldNearDateLineWrapLongitude() {
-
GeoPoint geoValuesTopLeft = new GeoPoint(38, 170);
GeoPoint geoValuesBottomRight = new GeoPoint(-24, -175);
- SearchResponse response = prepareSearch(DATELINE_IDX_NAME).addAggregation(
- boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME).wrapLongitude(true)
- ).get();
-
- assertNoFailures(response);
-
- GeoBounds geoBounds = response.getAggregations().get(aggName());
- assertThat(geoBounds, notNullValue());
- assertThat(geoBounds.getName(), equalTo(aggName()));
- GeoPoint topLeft = geoBounds.topLeft();
- GeoPoint bottomRight = geoBounds.bottomRight();
- assertThat(topLeft.getY(), closeTo(geoValuesTopLeft.getY(), GEOHASH_TOLERANCE));
- assertThat(topLeft.getX(), closeTo(geoValuesTopLeft.getX(), GEOHASH_TOLERANCE));
- assertThat(bottomRight.getY(), closeTo(geoValuesBottomRight.getY(), GEOHASH_TOLERANCE));
- assertThat(bottomRight.getX(), closeTo(geoValuesBottomRight.getX(), GEOHASH_TOLERANCE));
+ assertNoFailuresAndResponse(
+ prepareSearch(DATELINE_IDX_NAME).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME).wrapLongitude(true)),
+ response -> {
+ GeoBounds geoBounds = response.getAggregations().get(aggName());
+ assertThat(geoBounds, notNullValue());
+ assertThat(geoBounds.getName(), equalTo(aggName()));
+ GeoPoint topLeft = geoBounds.topLeft();
+ GeoPoint bottomRight = geoBounds.bottomRight();
+ assertThat(topLeft.getY(), closeTo(geoValuesTopLeft.getY(), GEOHASH_TOLERANCE));
+ assertThat(topLeft.getX(), closeTo(geoValuesTopLeft.getX(), GEOHASH_TOLERANCE));
+ assertThat(bottomRight.getY(), closeTo(geoValuesBottomRight.getY(), GEOHASH_TOLERANCE));
+ assertThat(bottomRight.getX(), closeTo(geoValuesBottomRight.getX(), GEOHASH_TOLERANCE));
+ }
+ );
}
@Override
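
The two GeoBoundsIT tests above encode the dateline behaviour: with wrapLongitude(false) the box spans nearly the whole globe (left -179, right 178), while with wrapLongitude(true) it crosses the antimeridian, so the left edge (170) is numerically greater than the right (-175). The sketch below illustrates that choice for two points only; DatelineBoundsSketch is a hypothetical simplification, not the aggregator's implementation, which folds arbitrary point sets into its bounds.

final class DatelineBoundsSketch {
    record LonBounds(double left, double right) {}

    // Choose between the box that stays inside [-180, 180] and the one that
    // crosses the dateline; wrapping is taken only when allowed and strictly narrower.
    static LonBounds bounds(double lonA, double lonB, boolean wrapLongitude) {
        double min = Math.min(lonA, lonB);
        double max = Math.max(lonA, lonB);
        double directSpan = max - min;            // box not crossing the dateline
        double wrappedSpan = 360.0 - directSpan;  // box crossing the dateline
        if (wrapLongitude && wrappedSpan < directSpan) {
            return new LonBounds(max, min);       // left > right: crosses 180/-180
        }
        return new LonBounds(min, max);
    }

    public static void main(String[] args) {
        // Mirrors the test data above: points at longitudes 170 and -175.
        System.out.println(bounds(170, -175, false)); // LonBounds[left=-175.0, right=170.0]
        System.out.println(bounds(170, -175, true));  // LonBounds[left=170.0, right=-175.0]
    }
}
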
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java
index 4b12cddde691f..a7d32863718e3 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java
@@ -8,7 +8,6 @@
package org.elasticsearch.search.aggregations.metrics;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.SpatialPoint;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGrid;
@@ -18,7 +17,7 @@
import java.util.List;
import static org.elasticsearch.search.aggregations.AggregationBuilders.geohashGrid;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
@@ -29,21 +28,24 @@
public class GeoCentroidIT extends CentroidAggregationTestBase {
public void testSingleValueFieldAsSubAggToGeohashGrid() {
- SearchResponse response = prepareSearch(HIGH_CARD_IDX_NAME).addAggregation(
- geohashGrid("geoGrid").field(SINGLE_VALUED_FIELD_NAME).subAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME))
- ).get();
- assertNoFailures(response);
-
- GeoGrid grid = response.getAggregations().get("geoGrid");
- assertThat(grid, notNullValue());
- assertThat(grid.getName(), equalTo("geoGrid"));
- List<? extends GeoGrid.Bucket> buckets = grid.getBuckets();
- for (GeoGrid.Bucket cell : buckets) {
- String geohash = cell.getKeyAsString();
- SpatialPoint expectedCentroid = expectedCentroidsForGeoHash.get(geohash);
- GeoCentroid centroidAgg = cell.getAggregations().get(aggName());
- assertSameCentroid(centroidAgg.centroid(), expectedCentroid);
- }
+ assertNoFailuresAndResponse(
+ prepareSearch(HIGH_CARD_IDX_NAME).addAggregation(
+ geohashGrid("geoGrid").field(SINGLE_VALUED_FIELD_NAME)
+ .subAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME))
+ ),
+ response -> {
+ GeoGrid grid = response.getAggregations().get("geoGrid");
+ assertThat(grid, notNullValue());
+ assertThat(grid.getName(), equalTo("geoGrid"));
+ List<? extends GeoGrid.Bucket> buckets = grid.getBuckets();
+ for (GeoGrid.Bucket cell : buckets) {
+ String geohash = cell.getKeyAsString();
+ SpatialPoint expectedCentroid = expectedCentroidsForGeoHash.get(geohash);
+ GeoCentroid centroidAgg = cell.getAggregations().get(aggName());
+ assertSameCentroid(centroidAgg.centroid(), expectedCentroid);
+ }
+ }
+ );
}
@Override
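
The HDR tests that follow drive PercentilesMethod.HDR, which is backed by the HdrHistogram library; numberOfSignificantValueDigits controls recording precision. A small standalone sketch of the two query directions the tests exercise, assuming the org.HdrHistogram dependency (the aggregator adds reduce and serialization plumbing on top):

import org.HdrHistogram.DoubleHistogram;

final class HdrSketch {
    public static void main(String[] args) {
        // Corresponds to numberOfSignificantValueDigits(sigDigits) in the tests.
        DoubleHistogram histo = new DoubleHistogram(3);
        for (int v = 1; v <= 100; v++) {
            histo.recordValue(v);
        }
        // Percentile rank, as in percentileRanks(...): what share of recorded
        // values is at or below 50? Roughly 50.0 here.
        System.out.println(histo.getPercentileAtOrBelowValue(50));
        // Percentile, as in percentiles(...).percentiles(99): the value at the
        // 99th percentile, accurate to the configured significant digits.
        System.out.println(histo.getValueAtPercentile(99));
    }
}
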
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java
index 7d5e446d591bb..d4b5be3045cdf 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java
@@ -8,7 +8,6 @@
package org.elasticsearch.search.aggregations.metrics;
import org.apache.logging.log4j.LogManager;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
@@ -40,6 +39,7 @@
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
@@ -99,70 +99,76 @@ private void assertConsistent(double[] pcts, PercentileRanks values, long minVal
@Override
public void testEmptyAggregation() throws Exception {
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
- .addAggregation(
- histogram("histo").field("value")
- .interval(1L)
- .minDocCount(0)
- .subAggregation(
- percentileRanks("percentile_ranks", new double[] { 10, 15 }).field("value")
- .method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- )
- )
- .get();
-
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
- Histogram histo = searchResponse.getAggregations().get("histo");
- assertThat(histo, notNullValue());
- Histogram.Bucket bucket = histo.getBuckets().get(1);
- assertThat(bucket, notNullValue());
-
- PercentileRanks reversePercentiles = bucket.getAggregations().get("percentile_ranks");
- assertThat(reversePercentiles, notNullValue());
- assertThat(reversePercentiles.getName(), equalTo("percentile_ranks"));
- assertThat(reversePercentiles.percent(10), equalTo(Double.NaN));
- assertThat(reversePercentiles.percent(15), equalTo(Double.NaN));
+ assertResponse(
+ prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
+ .addAggregation(
+ histogram("histo").field("value")
+ .interval(1L)
+ .minDocCount(0)
+ .subAggregation(
+ percentileRanks("percentile_ranks", new double[] { 10, 15 }).field("value")
+ .method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ )
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+ Histogram histo = response.getAggregations().get("histo");
+ assertThat(histo, notNullValue());
+ Histogram.Bucket bucket = histo.getBuckets().get(1);
+ assertThat(bucket, notNullValue());
+
+ PercentileRanks reversePercentiles = bucket.getAggregations().get("percentile_ranks");
+ assertThat(reversePercentiles, notNullValue());
+ assertThat(reversePercentiles.getName(), equalTo("percentile_ranks"));
+ assertThat(reversePercentiles.percent(10), equalTo(Double.NaN));
+ assertThat(reversePercentiles.percent(15), equalTo(Double.NaN));
+ }
+ );
}
@Override
public void testUnmapped() throws Exception {
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx_unmapped").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 }).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("value")
- )
- .get();
-
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L));
-
- PercentileRanks reversePercentiles = searchResponse.getAggregations().get("percentile_ranks");
- assertThat(reversePercentiles, notNullValue());
- assertThat(reversePercentiles.getName(), equalTo("percentile_ranks"));
- assertThat(reversePercentiles.percent(0), equalTo(Double.NaN));
- assertThat(reversePercentiles.percent(10), equalTo(Double.NaN));
- assertThat(reversePercentiles.percent(15), equalTo(Double.NaN));
- assertThat(reversePercentiles.percent(100), equalTo(Double.NaN));
+ assertResponse(
+ prepareSearch("idx_unmapped").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 }).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("value")
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(0L));
+
+ PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks");
+ assertThat(reversePercentiles, notNullValue());
+ assertThat(reversePercentiles.getName(), equalTo("percentile_ranks"));
+ assertThat(reversePercentiles.percent(0), equalTo(Double.NaN));
+ assertThat(reversePercentiles.percent(10), equalTo(Double.NaN));
+ assertThat(reversePercentiles.percent(15), equalTo(Double.NaN));
+ assertThat(reversePercentiles.percent(100), equalTo(Double.NaN));
+ }
+ );
}
@Override
public void testSingleValuedField() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = randomPercents(minValue, maxValue);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("value")
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValue, maxValue, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("value")
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValue, maxValue, sigDigits);
+ }
+ );
}
public void testNullValuesField() throws Exception {
@@ -201,84 +207,91 @@ public void testEmptyValuesField() throws Exception {
public void testSingleValuedFieldGetProperty() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = randomPercents(minValue, maxValue);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- global("global").subAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("value")
- )
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- Global global = searchResponse.getAggregations().get("global");
- assertThat(global, notNullValue());
- assertThat(global.getName(), equalTo("global"));
- assertThat(global.getDocCount(), equalTo(10L));
- assertThat(global.getAggregations(), notNullValue());
- assertThat(global.getAggregations().asMap().size(), equalTo(1));
-
- PercentileRanks values = global.getAggregations().get("percentile_ranks");
- assertThat(values, notNullValue());
- assertThat(values.getName(), equalTo("percentile_ranks"));
- assertThat(((InternalAggregation) global).getProperty("percentile_ranks"), sameInstance(values));
-
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ global("global").subAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("value")
+ )
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ Global global = response.getAggregations().get("global");
+ assertThat(global, notNullValue());
+ assertThat(global.getName(), equalTo("global"));
+ assertThat(global.getDocCount(), equalTo(10L));
+ assertThat(global.getAggregations(), notNullValue());
+ assertThat(global.getAggregations().asMap().size(), equalTo(1));
+
+ PercentileRanks values = global.getAggregations().get("percentile_ranks");
+ assertThat(values, notNullValue());
+ assertThat(values.getName(), equalTo("percentile_ranks"));
+ assertThat(((InternalAggregation) global).getProperty("percentile_ranks"), sameInstance(values));
+ }
+ );
}
public void testSingleValuedFieldOutsideRange() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = new double[] { minValue - 1, maxValue + 1 };
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("value")
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValue, maxValue, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("value")
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValue, maxValue, sigDigits);
+ }
+ );
}
@Override
public void testSingleValuedFieldPartiallyUnmapped() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = randomPercents(minValue, maxValue);
- SearchResponse searchResponse = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("value")
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValue, maxValue, sigDigits);
+ assertResponse(
+ prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("value")
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValue, maxValue, sigDigits);
+ }
+ );
}
@Override
public void testSingleValuedFieldWithValueScript() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = randomPercents(minValue - 1, maxValue - 1);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("value")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValue - 1, maxValue - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("value")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValue - 1, maxValue - 1, sigDigits);
+ }
+ );
}
@Override
@@ -287,74 +300,82 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("dec", 1);
final double[] pcts = randomPercents(minValue - 1, maxValue - 1);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("value")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValue - 1, maxValue - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("value")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValue - 1, maxValue - 1, sigDigits);
+ }
+ );
}
@Override
public void testMultiValuedField() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = randomPercents(minValues, maxValues);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("values")
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValues, maxValues, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("values")
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValues, maxValues, sigDigits);
+ }
+ );
}
@Override
public void testMultiValuedFieldWithValueScript() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = randomPercents(minValues - 1, maxValues - 1);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValues - 1, maxValues - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValues - 1, maxValues - 1, sigDigits);
+ }
+ );
}
public void testMultiValuedFieldWithValueScriptReverse() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = randomPercents(20 - maxValues, 20 - minValues);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap()))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, 20 - maxValues, 20 - minValues, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap()))
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, 20 - maxValues, 20 - minValues, sigDigits);
+ }
+ );
}
@Override
@@ -363,37 +384,41 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("dec", 1);
final double[] pcts = randomPercents(minValues - 1, maxValues - 1);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValues - 1, maxValues - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValues - 1, maxValues - 1, sigDigits);
+ }
+ );
}
@Override
public void testScriptSingleValued() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = randomPercents(minValue, maxValue);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValue, maxValue, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()))
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValue, maxValue, sigDigits);
+ }
+ );
}
@Override
@@ -405,18 +430,20 @@ public void testScriptSingleValuedWithParams() throws Exception {
Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value - dec", params);
final double[] pcts = randomPercents(minValue - 1, maxValue - 1);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .script(script)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValue - 1, maxValue - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .script(script)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValue - 1, maxValue - 1, sigDigits);
+ }
+ );
}
@Override
@@ -426,18 +453,20 @@ public void testScriptMultiValued() throws Exception {
Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap());
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .script(script)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .script(script)
+ ),
+ response -> {
+ assertHitCount(response, 10);
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValues, maxValues, sigDigits);
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValues, maxValues, sigDigits);
+ }
+ );
}
@Override
@@ -446,87 +475,93 @@ public void testScriptMultiValuedWithParams() throws Exception {
Script script = AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES;
final double[] pcts = randomPercents(minValues - 1, maxValues - 1);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .script(script)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks");
- assertConsistent(pcts, values, minValues - 1, maxValues - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .script(script)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final PercentileRanks values = response.getAggregations().get("percentile_ranks");
+ assertConsistent(pcts, values, minValues - 1, maxValues - 1, sigDigits);
+ }
+ );
}
public void testOrderBySubAggregation() {
int sigDigits = randomSignificantDigits();
boolean asc = randomBoolean();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- histogram("histo").field("value")
- .interval(2L)
- .subAggregation(
- percentileRanks("percentile_ranks", new double[] { 99 }).field("value")
- .method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- )
- .order(BucketOrder.aggregation("percentile_ranks", "99", asc))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- Histogram histo = searchResponse.getAggregations().get("histo");
- double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
- for (Histogram.Bucket bucket : histo.getBuckets()) {
- PercentileRanks values = bucket.getAggregations().get("percentile_ranks");
- double p99 = values.percent(99);
- if (asc) {
- assertThat(p99, greaterThanOrEqualTo(previous));
- } else {
- assertThat(p99, lessThanOrEqualTo(previous));
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ histogram("histo").field("value")
+ .interval(2L)
+ .subAggregation(
+ percentileRanks("percentile_ranks", new double[] { 99 }).field("value")
+ .method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ )
+ .order(BucketOrder.aggregation("percentile_ranks", "99", asc))
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ Histogram histo = response.getAggregations().get("histo");
+ double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
+ for (Histogram.Bucket bucket : histo.getBuckets()) {
+ PercentileRanks values = bucket.getAggregations().get("percentile_ranks");
+ double p99 = values.percent(99);
+ if (asc) {
+ assertThat(p99, greaterThanOrEqualTo(previous));
+ } else {
+ assertThat(p99, lessThanOrEqualTo(previous));
+ }
+ previous = p99;
+ }
}
- previous = p99;
- }
+ );
}
@Override
public void testOrderByEmptyAggregation() throws Exception {
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- terms("terms").field("value")
- .order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true)))
- .subAggregation(
- filter("filter", termQuery("value", 100)).subAggregation(
- percentileRanks("ranks", new double[] { 99 }).method(PercentilesMethod.HDR).field("value")
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ terms("terms").field("value")
+ .order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true)))
+ .subAggregation(
+ filter("filter", termQuery("value", 100)).subAggregation(
+ percentileRanks("ranks", new double[] { 99 }).method(PercentilesMethod.HDR).field("value")
+ )
)
- )
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- Terms terms = searchResponse.getAggregations().get("terms");
- assertThat(terms, notNullValue());
- List<? extends Terms.Bucket> buckets = terms.getBuckets();
- assertThat(buckets, notNullValue());
- assertThat(buckets.size(), equalTo(10));
-
- for (int i = 0; i < 10; i++) {
- Terms.Bucket bucket = buckets.get(i);
- assertThat(bucket, notNullValue());
- assertThat(bucket.getKeyAsNumber(), equalTo((long) i + 1));
- assertThat(bucket.getDocCount(), equalTo(1L));
- Filter filter = bucket.getAggregations().get("filter");
- assertThat(filter, notNullValue());
- assertThat(filter.getDocCount(), equalTo(0L));
- PercentileRanks ranks = filter.getAggregations().get("ranks");
- assertThat(ranks, notNullValue());
- assertThat(ranks.percent(99), equalTo(Double.NaN));
-
- }
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ Terms terms = response.getAggregations().get("terms");
+ assertThat(terms, notNullValue());
+ List<? extends Terms.Bucket> buckets = terms.getBuckets();
+ assertThat(buckets, notNullValue());
+ assertThat(buckets.size(), equalTo(10));
+
+ for (int i = 0; i < 10; i++) {
+ Terms.Bucket bucket = buckets.get(i);
+ assertThat(bucket, notNullValue());
+ assertThat(bucket.getKeyAsNumber(), equalTo((long) i + 1));
+ assertThat(bucket.getDocCount(), equalTo(1L));
+ Filter filter = bucket.getAggregations().get("filter");
+ assertThat(filter, notNullValue());
+ assertThat(filter.getDocCount(), equalTo(0L));
+ PercentileRanks ranks = filter.getAggregations().get("ranks");
+ assertThat(ranks, notNullValue());
+ assertThat(ranks.percent(99), equalTo(Double.NaN));
+
+ }
+ }
+ );
}
/**
@@ -555,14 +590,14 @@ public void testScriptCaching() throws Exception {
);
// Test that a request using a nondeterministic script does not get cached
- SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(
- percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR)
- .field("d")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))
- )
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(
+ percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR)
+ .field("d")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))
+ )
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -574,14 +609,14 @@ public void testScriptCaching() throws Exception {
);
// Test that a request using a deterministic script gets cached
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(
- percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR)
- .field("d")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
- )
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(
+ percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR)
+ .field("d")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
+ )
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -593,10 +628,10 @@ public void testScriptCaching() throws Exception {
);
// Ensure that non-scripted requests are cached as normal
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR).field("d"))
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR).field("d"))
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -607,5 +642,4 @@ public void testScriptCaching() throws Exception {
equalTo(2L)
);
}
-
}
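
Both testScriptCaching methods above and below assert the same request-cache rule: a request using a nondeterministic script (Math.random()) must stay out of the request cache, while deterministic and script-free requests are cached, which is why the miss count grows and the hit count stays at zero on first execution. A toy model of that policy; QueryCacheSketch and its names are hypothetical, not the IndicesRequestCache API.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

final class QueryCacheSketch {
    private final Map<String, String> entries = new HashMap<>();
    int hits, misses;

    // Deterministic requests go through the cache; nondeterministic ones bypass
    // it entirely and never produce a hit or a miss, mirroring the stats the
    // tests read via prepareStats(...).setRequestCache(true).
    String execute(String key, boolean deterministic, Supplier<String> compute) {
        if (deterministic == false) {
            return compute.get();
        }
        String cached = entries.get(key);
        if (cached != null) {
            hits++;
            return cached;
        }
        misses++;
        String result = compute.get();
        entries.put(key, result);
        return result;
    }
}
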
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java
index 3ac50c7b5e104..9eac8d4a06a43 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java
@@ -8,7 +8,6 @@
package org.elasticsearch.search.aggregations.metrics;
import org.apache.logging.log4j.LogManager;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.plugins.Plugin;
@@ -42,6 +41,7 @@
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -102,143 +102,154 @@ private void assertConsistent(double[] pcts, Percentiles percentiles, long minVa
@Override
public void testEmptyAggregation() throws Exception {
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
- .addAggregation(
- histogram("histo").field("value")
- .interval(1L)
- .minDocCount(0)
- .subAggregation(
- percentiles("percentiles").field("value")
- .numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .percentiles(10, 15)
- )
- )
- .get();
-
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
- Histogram histo = searchResponse.getAggregations().get("histo");
- assertThat(histo, notNullValue());
- Histogram.Bucket bucket = histo.getBuckets().get(1);
- assertThat(bucket, notNullValue());
-
- Percentiles percentiles = bucket.getAggregations().get("percentiles");
- assertThat(percentiles, notNullValue());
- assertThat(percentiles.getName(), equalTo("percentiles"));
- assertThat(percentiles.percentile(10), equalTo(Double.NaN));
- assertThat(percentiles.percentile(15), equalTo(Double.NaN));
+ assertResponse(
+ prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
+ .addAggregation(
+ histogram("histo").field("value")
+ .interval(1L)
+ .minDocCount(0)
+ .subAggregation(
+ percentiles("percentiles").field("value")
+ .numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .percentiles(10, 15)
+ )
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+ Histogram histo = response.getAggregations().get("histo");
+ assertThat(histo, notNullValue());
+ Histogram.Bucket bucket = histo.getBuckets().get(1);
+ assertThat(bucket, notNullValue());
+
+ Percentiles percentiles = bucket.getAggregations().get("percentiles");
+ assertThat(percentiles, notNullValue());
+ assertThat(percentiles.getName(), equalTo("percentiles"));
+ assertThat(percentiles.percentile(10), equalTo(Double.NaN));
+ assertThat(percentiles.percentile(15), equalTo(Double.NaN));
+ }
+ );
}
@Override
public void testUnmapped() throws Exception {
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx_unmapped").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .field("value")
- .percentiles(0, 10, 15, 100)
- )
- .get();
-
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L));
-
- Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertThat(percentiles, notNullValue());
- assertThat(percentiles.getName(), equalTo("percentiles"));
- assertThat(percentiles.percentile(0), equalTo(Double.NaN));
- assertThat(percentiles.percentile(10), equalTo(Double.NaN));
- assertThat(percentiles.percentile(15), equalTo(Double.NaN));
- assertThat(percentiles.percentile(100), equalTo(Double.NaN));
+ assertResponse(
+ prepareSearch("idx_unmapped").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .field("value")
+ .percentiles(0, 10, 15, 100)
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(0L));
+
+ Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertThat(percentiles, notNullValue());
+ assertThat(percentiles.getName(), equalTo("percentiles"));
+ assertThat(percentiles.percentile(0), equalTo(Double.NaN));
+ assertThat(percentiles.percentile(10), equalTo(Double.NaN));
+ assertThat(percentiles.percentile(15), equalTo(Double.NaN));
+ assertThat(percentiles.percentile(100), equalTo(Double.NaN));
+ }
+ );
}
@Override
public void testSingleValuedField() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomIntBetween(1, 5);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .field("value")
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
- }
-
- @Override
- public void testSingleValuedFieldGetProperty() throws Exception {
- final double[] pcts = randomPercentiles();
- int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- global("global").subAggregation(
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
.method(PercentilesMethod.HDR)
.field("value")
.percentiles(pcts)
- )
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- Global global = searchResponse.getAggregations().get("global");
- assertThat(global, notNullValue());
- assertThat(global.getName(), equalTo("global"));
- assertThat(global.getDocCount(), equalTo(10L));
- assertThat(global.getAggregations(), notNullValue());
- assertThat(global.getAggregations().asMap().size(), equalTo(1));
+ ),
+ response -> {
+ assertHitCount(response, 10);
- Percentiles percentiles = global.getAggregations().get("percentiles");
- assertThat(percentiles, notNullValue());
- assertThat(percentiles.getName(), equalTo("percentiles"));
- assertThat(((InternalAggregation) global).getProperty("percentiles"), sameInstance(percentiles));
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
+ }
+ );
+ }
+ @Override
+ public void testSingleValuedFieldGetProperty() throws Exception {
+ final double[] pcts = randomPercentiles();
+ int sigDigits = randomSignificantDigits();
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ global("global").subAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .field("value")
+ .percentiles(pcts)
+ )
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ Global global = response.getAggregations().get("global");
+ assertThat(global, notNullValue());
+ assertThat(global.getName(), equalTo("global"));
+ assertThat(global.getDocCount(), equalTo(10L));
+ assertThat(global.getAggregations(), notNullValue());
+ assertThat(global.getAggregations().asMap().size(), equalTo(1));
+
+ Percentiles percentiles = global.getAggregations().get("percentiles");
+ assertThat(percentiles, notNullValue());
+ assertThat(percentiles.getName(), equalTo("percentiles"));
+ assertThat(((InternalAggregation) global).getProperty("percentiles"), sameInstance(percentiles));
+ }
+ );
}
@Override
public void testSingleValuedFieldPartiallyUnmapped() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .field("value")
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
+ assertResponse(
+ prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .field("value")
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
+ }
+ );
}
@Override
public void testSingleValuedFieldWithValueScript() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .field("value")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .field("value")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
+ }
+ );
}
@Override
@@ -248,78 +259,86 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .field("value")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .field("value")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
+ }
+ );
}
@Override
public void testMultiValuedField() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .field("values")
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .field("values")
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
+ }
+ );
}
@Override
public void testMultiValuedFieldWithValueScript() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
+ }
+ );
}
public void testMultiValuedFieldWithValueScriptReverse() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap()))
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, 20 - maxValues, 20 - minValues, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap()))
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, 20 - maxValues, 20 - minValues, sigDigits);
+ }
+ );
}
@Override
@@ -329,39 +348,43 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
+ }
+ );
}
@Override
public void testScriptSingleValued() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()))
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()))
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
+ }
+ );
}
@Override
@@ -373,19 +396,21 @@ public void testScriptSingleValuedWithParams() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .script(script)
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .script(script)
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
+ }
+ );
}
@Override
@@ -395,19 +420,21 @@ public void testScriptMultiValued() throws Exception {
Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap());
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .script(script)
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .script(script)
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
+ }
+ );
}
@Override
@@ -416,89 +443,96 @@ public void testScriptMultiValuedWithParams() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
- .method(PercentilesMethod.HDR)
- .script(script)
- .percentiles(pcts)
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
- assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ percentiles("percentiles").numberOfSignificantValueDigits(sigDigits)
+ .method(PercentilesMethod.HDR)
+ .script(script)
+ .percentiles(pcts)
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ final Percentiles percentiles = response.getAggregations().get("percentiles");
+ assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
+
+ }
+ );
}
public void testOrderBySubAggregation() {
int sigDigits = randomSignificantDigits();
boolean asc = randomBoolean();
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- histogram("histo").field("value")
- .interval(2L)
- .subAggregation(
- percentiles("percentiles").field("value")
- .method(PercentilesMethod.HDR)
- .numberOfSignificantValueDigits(sigDigits)
- .percentiles(99)
- )
- .order(BucketOrder.aggregation("percentiles", "99", asc))
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- Histogram histo = searchResponse.getAggregations().get("histo");
- double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
- for (Histogram.Bucket bucket : histo.getBuckets()) {
- Percentiles percentiles = bucket.getAggregations().get("percentiles");
- double p99 = percentiles.percentile(99);
- if (asc) {
- assertThat(p99, greaterThanOrEqualTo(previous));
- } else {
- assertThat(p99, lessThanOrEqualTo(previous));
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ histogram("histo").field("value")
+ .interval(2L)
+ .subAggregation(
+ percentiles("percentiles").field("value")
+ .method(PercentilesMethod.HDR)
+ .numberOfSignificantValueDigits(sigDigits)
+ .percentiles(99)
+ )
+ .order(BucketOrder.aggregation("percentiles", "99", asc))
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ Histogram histo = response.getAggregations().get("histo");
+ double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
+ for (Histogram.Bucket bucket : histo.getBuckets()) {
+ Percentiles percentiles = bucket.getAggregations().get("percentiles");
+ double p99 = percentiles.percentile(99);
+ if (asc) {
+ assertThat(p99, greaterThanOrEqualTo(previous));
+ } else {
+ assertThat(p99, lessThanOrEqualTo(previous));
+ }
+ previous = p99;
+ }
}
- previous = p99;
- }
+ );
}
@Override
public void testOrderByEmptyAggregation() throws Exception {
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- terms("terms").field("value")
- .order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true)))
- .subAggregation(
- filter("filter", termQuery("value", 100)).subAggregation(
- percentiles("percentiles").method(PercentilesMethod.HDR).field("value")
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ terms("terms").field("value")
+ .order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true)))
+ .subAggregation(
+ filter("filter", termQuery("value", 100)).subAggregation(
+ percentiles("percentiles").method(PercentilesMethod.HDR).field("value")
+ )
)
- )
- )
- .get();
-
- assertHitCount(searchResponse, 10);
-
- Terms terms = searchResponse.getAggregations().get("terms");
- assertThat(terms, notNullValue());
- List<? extends Terms.Bucket> buckets = terms.getBuckets();
- assertThat(buckets, notNullValue());
- assertThat(buckets.size(), equalTo(10));
-
- for (int i = 0; i < 10; i++) {
- Terms.Bucket bucket = buckets.get(i);
- assertThat(bucket, notNullValue());
- assertThat(bucket.getKeyAsNumber(), equalTo((long) i + 1));
- assertThat(bucket.getDocCount(), equalTo(1L));
- Filter filter = bucket.getAggregations().get("filter");
- assertThat(filter, notNullValue());
- assertThat(filter.getDocCount(), equalTo(0L));
- Percentiles percentiles = filter.getAggregations().get("percentiles");
- assertThat(percentiles, notNullValue());
- assertThat(percentiles.percentile(99), equalTo(Double.NaN));
-
- }
+ ),
+ response -> {
+ assertHitCount(response, 10);
+
+ Terms terms = response.getAggregations().get("terms");
+ assertThat(terms, notNullValue());
+ List<? extends Terms.Bucket> buckets = terms.getBuckets();
+ assertThat(buckets, notNullValue());
+ assertThat(buckets.size(), equalTo(10));
+
+ for (int i = 0; i < 10; i++) {
+ Terms.Bucket bucket = buckets.get(i);
+ assertThat(bucket, notNullValue());
+ assertThat(bucket.getKeyAsNumber(), equalTo((long) i + 1));
+ assertThat(bucket.getDocCount(), equalTo(1L));
+ Filter filter = bucket.getAggregations().get("filter");
+ assertThat(filter, notNullValue());
+ assertThat(filter.getDocCount(), equalTo(0L));
+ Percentiles percentiles = filter.getAggregations().get("percentiles");
+ assertThat(percentiles, notNullValue());
+ assertThat(percentiles.percentile(99), equalTo(Double.NaN));
+
+ }
+ }
+ );
}
/**
@@ -527,15 +561,15 @@ public void testScriptCaching() throws Exception {
);
// Test that a request using a nondeterministic script does not get cached
- SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(
- percentiles("foo").method(PercentilesMethod.HDR)
- .field("d")
- .percentiles(50.0)
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))
- )
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(
+ percentiles("foo").method(PercentilesMethod.HDR)
+ .field("d")
+ .percentiles(50.0)
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))
+ )
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -547,15 +581,15 @@ public void testScriptCaching() throws Exception {
);
// Test that a request using a deterministic script gets cached
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(
- percentiles("foo").method(PercentilesMethod.HDR)
- .field("d")
- .percentiles(50.0)
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
- )
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(
+ percentiles("foo").method(PercentilesMethod.HDR)
+ .field("d")
+ .percentiles(50.0)
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
+ )
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -567,10 +601,10 @@ public void testScriptCaching() throws Exception {
);
// Ensure that non-scripted requests are cached as normal
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(percentiles("foo").method(PercentilesMethod.HDR).field("d").percentiles(50.0))
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(percentiles("foo").method(PercentilesMethod.HDR).field("d").percentiles(50.0))
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -581,5 +615,4 @@ public void testScriptCaching() throws Exception {
equalTo(2L)
);
}
-
}
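Throughout these test files the refactor replaces the capture-and-assert pattern (`SearchResponse r = ... .get(); assertNoFailures(r); ...`) with consumer-based helpers such as `assertResponse`, so the helper, not each test, owns the response lifecycle. The helpers' implementations are not part of this diff; a minimal sketch of the pattern, with `Supplier` and `AutoCloseable` standing in for the real request-builder and ref-counted response types, might look like:

import java.util.function.Consumer;
import java.util.function.Supplier;

public final class ResponseAssertions {
    private ResponseAssertions() {}

    // Executes the request, hands the response to the test's assertion block,
    // and always releases the response, even when an assertion throws.
    // AutoCloseable stands in for the ref-counted response type; in
    // Elasticsearch the cleanup would be a decRef() rather than close().
    public static <R extends AutoCloseable> void assertResponse(Supplier<R> request, Consumer<R> assertions) throws Exception {
        R response = request.get();
        try {
            assertions.accept(response);
        } finally {
            response.close();
        }
    }
}

The `assertNoFailures(prepareSearch(...))` calls in the caching tests follow the same idea: the helper runs the request and performs the failure check internally, so no response escapes the call site.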
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java
index dae90424495a3..f494a339a7a71 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java
@@ -9,7 +9,6 @@
package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
@@ -46,6 +45,7 @@
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
@@ -138,21 +138,24 @@ private static MedianAbsoluteDeviationAggregationBuilder randomBuilder() {
@Override
public void testEmptyAggregation() throws Exception {
- final SearchResponse response = prepareSearch("empty_bucket_idx").addAggregation(
- histogram("histogram").field("value").interval(1).minDocCount(0).subAggregation(randomBuilder().field("value"))
- ).get();
-
- assertHitCount(response, 2);
-
- final Histogram histogram = response.getAggregations().get("histogram");
- assertThat(histogram, notNullValue());
- final Histogram.Bucket bucket = histogram.getBuckets().get(1);
- assertThat(bucket, notNullValue());
-
- final MedianAbsoluteDeviation mad = bucket.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
- assertThat(mad.getMedianAbsoluteDeviation(), is(Double.NaN));
+ assertResponse(
+ prepareSearch("empty_bucket_idx").addAggregation(
+ histogram("histogram").field("value").interval(1).minDocCount(0).subAggregation(randomBuilder().field("value"))
+ ),
+ response -> {
+ assertHitCount(response, 2);
+
+ final Histogram histogram = response.getAggregations().get("histogram");
+ assertThat(histogram, notNullValue());
+ final Histogram.Bucket bucket = histogram.getBuckets().get(1);
+ assertThat(bucket, notNullValue());
+
+ final MedianAbsoluteDeviation mad = bucket.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
+ assertThat(mad.getMedianAbsoluteDeviation(), is(Double.NaN));
+ }
+ );
}
@Override
@@ -162,68 +165,72 @@ public void testUnmapped() throws Exception {
@Override
public void testSingleValuedField() throws Exception {
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(randomBuilder().field("value")).get();
+ assertResponse(prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(randomBuilder().field("value")), response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(singleValueExactMAD));
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(singleValueExactMAD));
+ });
}
@Override
public void testSingleValuedFieldGetProperty() throws Exception {
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(global("global").subAggregation(randomBuilder().field("value")))
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final Global global = response.getAggregations().get("global");
- assertThat(global, notNullValue());
- assertThat(global.getName(), is("global"));
- assertThat(global.getDocCount(), is((long) NUMBER_OF_DOCS));
- assertThat(global.getAggregations(), notNullValue());
- assertThat(global.getAggregations().asMap().entrySet(), hasSize(1));
-
- final MedianAbsoluteDeviation mad = global.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
- assertThat(((InternalAggregation) global).getProperty("mad"), sameInstance(mad));
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(global("global").subAggregation(randomBuilder().field("value"))),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final Global global = response.getAggregations().get("global");
+ assertThat(global, notNullValue());
+ assertThat(global.getName(), is("global"));
+ assertThat(global.getDocCount(), is((long) NUMBER_OF_DOCS));
+ assertThat(global.getAggregations(), notNullValue());
+ assertThat(global.getAggregations().asMap().entrySet(), hasSize(1));
+
+ final MedianAbsoluteDeviation mad = global.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
+ assertThat(((InternalAggregation) global).getProperty("mad"), sameInstance(mad));
+ }
+ );
}
@Override
public void testSingleValuedFieldPartiallyUnmapped() throws Exception {
- final SearchResponse response = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery())
- .addAggregation(randomBuilder().field("value"))
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(singleValueExactMAD));
+ assertResponse(
+ prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery()).addAggregation(randomBuilder().field("value")),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(singleValueExactMAD));
+ }
+ );
}
@Override
public void testSingleValuedFieldWithValueScript() throws Exception {
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- randomBuilder().field("value")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))
- )
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
-
- final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample).map(point -> point + 1).toArray());
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ randomBuilder().field("value")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
+
+ final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample).map(point -> point + 1).toArray());
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ }
+ );
}
@Override
@@ -231,53 +238,55 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception {
final Map<String, Object> params = new HashMap<>();
params.put("inc", 1);
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- randomBuilder().field("value")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))
- )
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
-
- final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample).map(point -> point + 1).toArray());
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ randomBuilder().field("value")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
+
+ final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample).map(point -> point + 1).toArray());
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ }
+ );
}
@Override
public void testMultiValuedField() throws Exception {
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(randomBuilder().field("values"))
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
+ assertResponse(prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(randomBuilder().field("values")), response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(multiValueExactMAD));
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(multiValueExactMAD));
+ });
}
@Override
public void testMultiValuedFieldWithValueScript() throws Exception {
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- randomBuilder().field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))
- )
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
-
- final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(multiValueSample).map(point -> point + 1).toArray());
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ randomBuilder().field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+
+ final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(multiValueSample).map(point -> point + 1).toArray());
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ }
+ );
}
@Override
@@ -285,38 +294,42 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
final Map<String, Object> params = new HashMap<>();
params.put("inc", 1);
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- randomBuilder().field("values")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))
- )
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
-
- final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(multiValueSample).map(point -> point + 1).toArray());
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ randomBuilder().field("values")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+
+ final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(multiValueSample).map(point -> point + 1).toArray());
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ }
+ );
}
@Override
public void testScriptSingleValued() throws Exception {
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- randomBuilder().script(
- new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap())
- )
- )
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(singleValueExactMAD));
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ randomBuilder().script(
+ new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap())
+ )
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(singleValueExactMAD));
+ }
+ );
}
@Override
@@ -324,38 +337,44 @@ public void testScriptSingleValuedWithParams() throws Exception {
final Map<String, Object> params = new HashMap<>();
params.put("inc", 1);
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- randomBuilder().script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value + inc", params))
- )
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ randomBuilder().script(
+ new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value + inc", params)
+ )
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
- final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample).map(point -> point + 1).toArray());
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample).map(point -> point + 1).toArray());
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ }
+ );
}
@Override
public void testScriptMultiValued() throws Exception {
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- randomBuilder().script(
- new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", Collections.emptyMap())
- )
- )
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(multiValueExactMAD));
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ randomBuilder().script(
+ new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", Collections.emptyMap())
+ )
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(multiValueExactMAD));
+ }
+ );
}
@Override
@@ -363,107 +382,112 @@ public void testScriptMultiValuedWithParams() throws Exception {
final Map<String, Object> params = new HashMap<>();
params.put("inc", 1);
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- randomBuilder().script(
- new Script(
- ScriptType.INLINE,
- AggregationTestScriptsPlugin.NAME,
- "[ doc['value'].value, doc['value'].value + inc ]",
- params
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ randomBuilder().script(
+ new Script(
+ ScriptType.INLINE,
+ AggregationTestScriptsPlugin.NAME,
+ "[ doc['value'].value, doc['value'].value + inc ]",
+ params
+ )
)
- )
- )
- .get();
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
- assertHitCount(response, NUMBER_OF_DOCS);
+ final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getName(), is("mad"));
- final MedianAbsoluteDeviation mad = response.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getName(), is("mad"));
-
- final double fromIncrementedSampleMAD = calculateMAD(
- Arrays.stream(singleValueSample).flatMap(point -> LongStream.of(point, point + 1)).toArray()
+ final double fromIncrementedSampleMAD = calculateMAD(
+ Arrays.stream(singleValueSample).flatMap(point -> LongStream.of(point, point + 1)).toArray()
+ );
+ assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
+ }
);
- assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD));
}
public void testAsSubAggregation() throws Exception {
final int rangeBoundary = (MAX_SAMPLE_VALUE + MIN_SAMPLE_VALUE) / 2;
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- range("range").field("value")
- .addRange(MIN_SAMPLE_VALUE, rangeBoundary)
- .addRange(rangeBoundary, MAX_SAMPLE_VALUE)
- .subAggregation(randomBuilder().field("value"))
- )
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final long[] lowerBucketSample = Arrays.stream(singleValueSample)
- .filter(point -> point >= MIN_SAMPLE_VALUE && point < rangeBoundary)
- .toArray();
- final long[] upperBucketSample = Arrays.stream(singleValueSample)
- .filter(point -> point >= rangeBoundary && point < MAX_SAMPLE_VALUE)
- .toArray();
-
- final Range range = response.getAggregations().get("range");
- assertThat(range, notNullValue());
- List<? extends Range.Bucket> buckets = range.getBuckets();
- assertThat(buckets, notNullValue());
- assertThat(buckets, hasSize(2));
-
- final Range.Bucket lowerBucket = buckets.get(0);
- assertThat(lowerBucket, notNullValue());
-
- final MedianAbsoluteDeviation lowerBucketMAD = lowerBucket.getAggregations().get("mad");
- assertThat(lowerBucketMAD, notNullValue());
- assertThat(lowerBucketMAD.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(lowerBucketSample)));
-
- final Range.Bucket upperBucket = buckets.get(1);
- assertThat(upperBucket, notNullValue());
-
- final MedianAbsoluteDeviation upperBucketMAD = upperBucket.getAggregations().get("mad");
- assertThat(upperBucketMAD, notNullValue());
- assertThat(upperBucketMAD.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(upperBucketSample)));
-
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ range("range").field("value")
+ .addRange(MIN_SAMPLE_VALUE, rangeBoundary)
+ .addRange(rangeBoundary, MAX_SAMPLE_VALUE)
+ .subAggregation(randomBuilder().field("value"))
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final long[] lowerBucketSample = Arrays.stream(singleValueSample)
+ .filter(point -> point >= MIN_SAMPLE_VALUE && point < rangeBoundary)
+ .toArray();
+ final long[] upperBucketSample = Arrays.stream(singleValueSample)
+ .filter(point -> point >= rangeBoundary && point < MAX_SAMPLE_VALUE)
+ .toArray();
+
+ final Range range = response.getAggregations().get("range");
+ assertThat(range, notNullValue());
+ List<? extends Range.Bucket> buckets = range.getBuckets();
+ assertThat(buckets, notNullValue());
+ assertThat(buckets, hasSize(2));
+
+ final Range.Bucket lowerBucket = buckets.get(0);
+ assertThat(lowerBucket, notNullValue());
+
+ final MedianAbsoluteDeviation lowerBucketMAD = lowerBucket.getAggregations().get("mad");
+ assertThat(lowerBucketMAD, notNullValue());
+ assertThat(lowerBucketMAD.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(lowerBucketSample)));
+
+ final Range.Bucket upperBucket = buckets.get(1);
+ assertThat(upperBucket, notNullValue());
+
+ final MedianAbsoluteDeviation upperBucketMAD = upperBucket.getAggregations().get("mad");
+ assertThat(upperBucketMAD, notNullValue());
+ assertThat(upperBucketMAD.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(upperBucketSample)));
+ }
+ );
}
@Override
public void testOrderByEmptyAggregation() throws Exception {
final int numberOfBuckets = 10;
- final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- terms("terms").field("value")
- .size(numberOfBuckets)
- .order(BucketOrder.compound(BucketOrder.aggregation("filter>mad", true)))
- .subAggregation(
- filter("filter", termQuery("value", MAX_SAMPLE_VALUE + 1)).subAggregation(randomBuilder().field("value"))
- )
- )
- .get();
-
- assertHitCount(response, NUMBER_OF_DOCS);
-
- final Terms terms = response.getAggregations().get("terms");
- assertThat(terms, notNullValue());
- List<? extends Terms.Bucket> buckets = terms.getBuckets();
- assertThat(buckets, notNullValue());
- assertThat(buckets, hasSize(numberOfBuckets));
-
- for (int i = 0; i < numberOfBuckets; i++) {
- Terms.Bucket bucket = buckets.get(i);
- assertThat(bucket, notNullValue());
-
- Filter filter = bucket.getAggregations().get("filter");
- assertThat(filter, notNullValue());
- assertThat(filter.getDocCount(), equalTo(0L));
-
- MedianAbsoluteDeviation mad = filter.getAggregations().get("mad");
- assertThat(mad, notNullValue());
- assertThat(mad.getMedianAbsoluteDeviation(), equalTo(Double.NaN));
- }
+ assertResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ terms("terms").field("value")
+ .size(numberOfBuckets)
+ .order(BucketOrder.compound(BucketOrder.aggregation("filter>mad", true)))
+ .subAggregation(
+ filter("filter", termQuery("value", MAX_SAMPLE_VALUE + 1)).subAggregation(randomBuilder().field("value"))
+ )
+ ),
+ response -> {
+ assertHitCount(response, NUMBER_OF_DOCS);
+
+ final Terms terms = response.getAggregations().get("terms");
+ assertThat(terms, notNullValue());
+ List<? extends Terms.Bucket> buckets = terms.getBuckets();
+ assertThat(buckets, notNullValue());
+ assertThat(buckets, hasSize(numberOfBuckets));
+
+ for (int i = 0; i < numberOfBuckets; i++) {
+ Terms.Bucket bucket = buckets.get(i);
+ assertThat(bucket, notNullValue());
+
+ Filter filter = bucket.getAggregations().get("filter");
+ assertThat(filter, notNullValue());
+ assertThat(filter.getDocCount(), equalTo(0L));
+
+ MedianAbsoluteDeviation mad = filter.getAggregations().get("mad");
+ assertThat(mad, notNullValue());
+ assertThat(mad.getMedianAbsoluteDeviation(), equalTo(Double.NaN));
+ }
+ }
+ );
}
/**
@@ -493,13 +517,13 @@ public void testScriptCaching() throws Exception {
);
// Test that a request using a nondeterministic script does not get cached
- SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(
- randomBuilder().field("d")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))
- )
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(
+ randomBuilder().field("d")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))
+ )
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -511,13 +535,13 @@ public void testScriptCaching() throws Exception {
);
// Test that a request using a deterministic script gets cached
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(
- randomBuilder().field("d")
- .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
- )
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(
+ randomBuilder().field("d")
+ .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))
+ )
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -529,8 +553,7 @@ public void testScriptCaching() throws Exception {
);
// Ensure that non-scripted requests are cached as normal
- r = prepareSearch("cache_test_idx").setSize(0).addAggregation(randomBuilder().field("d")).get();
- assertNoFailures(r);
+ assertNoFailures(prepareSearch("cache_test_idx").setSize(0).addAggregation(randomBuilder().field("d")));
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
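The ScriptedMetricIT changes below additionally fold the standalone `assertNoFailures(response)` call into `assertNoFailuresAndResponse`, which runs the failure check before the test's own assertions inside the same managed block. Under the same assumptions as the sketch above (the stub failure check here is hypothetical, not the real implementation):

import java.util.function.Consumer;
import java.util.function.Supplier;

public final class CombinedAssertions {
    private CombinedAssertions() {}

    // Hypothetical stand-in for the shard-failure check; the real helper
    // would fail the test if the response reports any shard failures.
    static <R> void assertNoFailures(R response) {}

    public static <R extends AutoCloseable> void assertNoFailuresAndResponse(Supplier<R> request, Consumer<R> assertions)
        throws Exception {
        R response = request.get();
        try {
            assertNoFailures(response);  // failure check first ...
            assertions.accept(response); // ... then the test's own assertions
        } finally {
            response.close();            // released in all cases
        }
    }
}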
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java
index 2ea09960071f9..a6876f606ffee 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java
@@ -11,7 +11,6 @@
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -50,6 +49,7 @@
import static org.elasticsearch.search.aggregations.AggregationBuilders.scriptedMetric;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
@@ -359,37 +359,39 @@ public void testMap() {
Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap());
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript))
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
- int numShardsRun = 0;
- for (Object object : aggregationList) {
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Map.class));
- Map<?, ?> map = (Map<?, ?>) object;
- assertThat(map.size(), lessThanOrEqualTo(1));
- if (map.size() == 1) {
- assertThat(map.get("count"), notNullValue());
- assertThat(map.get("count"), instanceOf(Number.class));
- assertThat(map.get("count"), equalTo(1));
- numShardsRun++;
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
+ int numShardsRun = 0;
+ for (Object object : aggregationList) {
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Map.class));
+ Map<?, ?> map = (Map<?, ?>) object;
+ assertThat(map.size(), lessThanOrEqualTo(1));
+ if (map.size() == 1) {
+ assertThat(map.get("count"), notNullValue());
+ assertThat(map.get("count"), instanceOf(Number.class));
+ assertThat(map.get("count"), equalTo(1));
+ numShardsRun++;
+ }
+ }
+ // We don't know how many shards will have documents but we need to make
+ // sure that at least one shard ran the map script
+ assertThat(numShardsRun, greaterThan(0));
}
- }
- // We don't know how many shards will have documents but we need to make
- // sure that at least one shard ran the map script
- assertThat(numShardsRun, greaterThan(0));
+ );
}
public void testMapWithParams() {
@@ -401,45 +403,47 @@ public void testMapWithParams() {
Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap());
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(aggregationParams)
- .mapScript(mapScript)
- .combineScript(combineScript)
- .reduceScript(reduceScript)
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
- int numShardsRun = 0;
- for (Object object : aggregationList) {
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Map.class));
- Map<?, ?> map = (Map<?, ?>) object;
- for (Map.Entry<?, ?> entry : map.entrySet()) {
- assertThat(entry, notNullValue());
- assertThat(entry.getKey(), notNullValue());
- assertThat(entry.getKey(), instanceOf(String.class));
- assertThat(entry.getValue(), notNullValue());
- assertThat(entry.getValue(), instanceOf(Number.class));
- String stringValue = (String) entry.getKey();
- assertThat(stringValue, equalTo("12"));
- Number numberValue = (Number) entry.getValue();
- assertThat(numberValue, equalTo(1));
- numShardsRun++;
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(aggregationParams)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript)
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
+ int numShardsRun = 0;
+ for (Object object : aggregationList) {
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Map.class));
+ Map<?, ?> map = (Map<?, ?>) object;
+ for (Map.Entry<?, ?> entry : map.entrySet()) {
+ assertThat(entry, notNullValue());
+ assertThat(entry.getKey(), notNullValue());
+ assertThat(entry.getKey(), instanceOf(String.class));
+ assertThat(entry.getValue(), notNullValue());
+ assertThat(entry.getValue(), instanceOf(Number.class));
+ String stringValue = (String) entry.getKey();
+ assertThat(stringValue, equalTo("12"));
+ Number numberValue = (Number) entry.getValue();
+ assertThat(numberValue, equalTo(1));
+ numShardsRun++;
+ }
+ }
+ assertThat(numShardsRun, greaterThan(0));
}
- }
- assertThat(numShardsRun, greaterThan(0));
+ );
}
public void testInitMutatesParams() {
@@ -449,47 +453,56 @@ public void testInitMutatesParams() {
Map<String, Object> params = new HashMap<>();
params.put("vars", varsMap);
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(params)
- .initScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()))
- .mapScript(
- new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", Collections.emptyMap())
- )
- .combineScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()))
- .reduceScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()))
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
- long totalCount = 0;
- for (Object object : aggregationList) {
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(HashMap.class));
- @SuppressWarnings("unchecked")
- Map<String, Object> map = (Map<String, Object>) object;
- assertThat(map, hasKey("list"));
- assertThat(map.get("list"), instanceOf(List.class));
- List<?> list = (List<?>) map.get("list");
- for (Object o : list) {
- assertThat(o, notNullValue());
- assertThat(o, instanceOf(Number.class));
- Number numberValue = (Number) o;
- assertThat(numberValue, equalTo(3));
- totalCount += numberValue.longValue();
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(params)
+ .initScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()))
+ .mapScript(
+ new Script(
+ ScriptType.INLINE,
+ CustomScriptPlugin.NAME,
+ "state.list.add(vars.multiplier)",
+ Collections.emptyMap()
+ )
+ )
+ .combineScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()))
+ .reduceScript(
+ new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap())
+ )
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
+ long totalCount = 0;
+ for (Object object : aggregationList) {
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(HashMap.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) object;
+ assertThat(map, hasKey("list"));
+ assertThat(map.get("list"), instanceOf(List.class));
+ List<?> list = (List<?>) map.get("list");
+ for (Object o : list) {
+ assertThat(o, notNullValue());
+ assertThat(o, instanceOf(Number.class));
+ Number numberValue = (Number) o;
+ assertThat(numberValue, equalTo(3));
+ totalCount += numberValue.longValue();
+ }
+ }
+ assertThat(totalCount, equalTo(numDocs * 3));
}
- }
- assertThat(totalCount, equalTo(numDocs * 3));
+ );
}
public void testMapCombineWithParams() {
@@ -508,40 +521,42 @@ public void testMapCombineWithParams() {
);
Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap());
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
- long totalCount = 0;
- for (Object object : aggregationList) {
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(List.class));
- List<?> list = (List<?>) object;
- for (Object o : list) {
- assertThat(o, notNullValue());
- assertThat(o, instanceOf(Number.class));
- Number numberValue = (Number) o;
- // A particular shard may not have any documents stored on it so
- // we have to assume the lower bound may be 0. The check at the
- // bottom of the test method will make sure the count is correct
- assertThat(numberValue.longValue(), allOf(greaterThanOrEqualTo(0L), lessThanOrEqualTo(numDocs)));
- totalCount += numberValue.longValue();
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
+ long totalCount = 0;
+ for (Object object : aggregationList) {
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(List.class));
+ List<?> list = (List<?>) object;
+ for (Object o : list) {
+ assertThat(o, notNullValue());
+ assertThat(o, instanceOf(Number.class));
+ Number numberValue = (Number) o;
+ // A particular shard may not have any documents stored on it so
+ // we have to assume the lower bound may be 0. The check at the
+ // bottom of the test method will make sure the count is correct
+ assertThat(numberValue.longValue(), allOf(greaterThanOrEqualTo(0L), lessThanOrEqualTo(numDocs)));
+ totalCount += numberValue.longValue();
+ }
+ }
+ assertThat(totalCount, equalTo(numDocs));
}
- }
- assertThat(totalCount, equalTo(numDocs));
+ );
}
public void testInitMapCombineWithParams() {
@@ -566,44 +581,46 @@ public void testInitMapCombineWithParams() {
);
Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap());
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(params)
- .initScript(initScript)
- .mapScript(mapScript)
- .combineScript(combineScript)
- .reduceScript(reduceScript)
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
- long totalCount = 0;
- for (Object object : aggregationList) {
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(List.class));
- List<?> list = (List<?>) object;
- for (Object o : list) {
- assertThat(o, notNullValue());
- assertThat(o, instanceOf(Number.class));
- Number numberValue = (Number) o;
- // A particular shard may not have any documents stored on it so
- // we have to assume the lower bound may be 0. The check at the
- // bottom of the test method will make sure the count is correct
- assertThat(numberValue.longValue(), allOf(greaterThanOrEqualTo(0L), lessThanOrEqualTo(numDocs * 3)));
- totalCount += numberValue.longValue();
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(params)
+ .initScript(initScript)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript)
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(getNumShards("idx").numPrimaries));
+ long totalCount = 0;
+ for (Object object : aggregationList) {
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(List.class));
+ List<?> list = (List<?>) object;
+ for (Object o : list) {
+ assertThat(o, notNullValue());
+ assertThat(o, instanceOf(Number.class));
+ Number numberValue = (Number) o;
+ // A particular shard may not have any documents stored on it so
+ // we have to assume the lower bound may be 0. The check at the
+ // bottom of the test method will make sure the count is correct
+ assertThat(numberValue.longValue(), allOf(greaterThanOrEqualTo(0L), lessThanOrEqualTo(numDocs * 3)));
+ totalCount += numberValue.longValue();
+ }
+ }
+ assertThat(totalCount, equalTo(numDocs * 3));
}
- }
- assertThat(totalCount, equalTo(numDocs * 3));
+ );
}
public void testInitMapCombineReduceWithParams() {
@@ -633,31 +650,33 @@ public void testInitMapCombineReduceWithParams() {
Collections.emptyMap()
);
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(params)
- .initScript(initScript)
- .mapScript(mapScript)
- .combineScript(combineScript)
- .reduceScript(reduceScript)
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(1));
- Object object = aggregationList.get(0);
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Number.class));
- assertThat(((Number) object).longValue(), equalTo(numDocs * 3));
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(params)
+ .initScript(initScript)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript)
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(1));
+ Object object = aggregationList.get(0);
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Number.class));
+ assertThat(((Number) object).longValue(), equalTo(numDocs * 3));
+ }
+ );
}
@SuppressWarnings("rawtypes")
@@ -688,42 +707,43 @@ public void testInitMapCombineReduceGetProperty() throws Exception {
Collections.emptyMap()
);
- SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- global("global").subAggregation(
- scriptedMetric("scripted").params(params)
- .initScript(initScript)
- .mapScript(mapScript)
- .combineScript(combineScript)
- .reduceScript(reduceScript)
- )
- )
- .get();
-
- assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numDocs));
-
- Global global = searchResponse.getAggregations().get("global");
- assertThat(global, notNullValue());
- assertThat(global.getName(), equalTo("global"));
- assertThat(global.getDocCount(), equalTo(numDocs));
- assertThat(global.getAggregations(), notNullValue());
- assertThat(global.getAggregations().asMap().size(), equalTo(1));
-
- ScriptedMetric scriptedMetricAggregation = global.getAggregations().get("scripted");
- assertThat(scriptedMetricAggregation, notNullValue());
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(1));
- Object object = aggregationList.get(0);
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Number.class));
- assertThat(((Number) object).longValue(), equalTo(numDocs * 3));
- assertThat(((InternalAggregation) global).getProperty("scripted"), sameInstance(scriptedMetricAggregation));
- assertThat((List) ((InternalAggregation) global).getProperty("scripted.value"), sameInstance(aggregationList));
- assertThat((List) ((InternalAggregation) scriptedMetricAggregation).getProperty("value"), sameInstance(aggregationList));
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ global("global").subAggregation(
+ scriptedMetric("scripted").params(params)
+ .initScript(initScript)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript)
+ )
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Global global = response.getAggregations().get("global");
+ assertThat(global, notNullValue());
+ assertThat(global.getName(), equalTo("global"));
+ assertThat(global.getDocCount(), equalTo(numDocs));
+ assertThat(global.getAggregations(), notNullValue());
+ assertThat(global.getAggregations().asMap().size(), equalTo(1));
+
+ ScriptedMetric scriptedMetricAggregation = global.getAggregations().get("scripted");
+ assertThat(scriptedMetricAggregation, notNullValue());
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(1));
+ Object object = aggregationList.get(0);
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Number.class));
+ assertThat(((Number) object).longValue(), equalTo(numDocs * 3));
+ assertThat(((InternalAggregation) global).getProperty("scripted"), sameInstance(scriptedMetricAggregation));
+ assertThat((List) ((InternalAggregation) global).getProperty("scripted.value"), sameInstance(aggregationList));
+ assertThat((List) ((InternalAggregation) scriptedMetricAggregation).getProperty("value"), sameInstance(aggregationList));
+ }
+ );
}
public void testMapCombineReduceWithParams() {
@@ -752,27 +772,29 @@ public void testMapCombineReduceWithParams() {
Collections.emptyMap()
);
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(1));
- Object object = aggregationList.get(0);
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Number.class));
- assertThat(((Number) object).longValue(), equalTo(numDocs));
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(1));
+ Object object = aggregationList.get(0);
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Number.class));
+ assertThat(((Number) object).longValue(), equalTo(numDocs));
+ }
+ );
}
public void testInitMapReduceWithParams() {
@@ -797,31 +819,33 @@ public void testInitMapReduceWithParams() {
Collections.emptyMap()
);
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(params)
- .initScript(initScript)
- .mapScript(mapScript)
- .combineScript(combineScript)
- .reduceScript(reduceScript)
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(1));
- Object object = aggregationList.get(0);
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Number.class));
- assertThat(((Number) object).longValue(), equalTo(numDocs * 3));
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(params)
+ .initScript(initScript)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript)
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(1));
+ Object object = aggregationList.get(0);
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Number.class));
+ assertThat(((Number) object).longValue(), equalTo(numDocs * 3));
+ }
+ );
}
public void testMapReduceWithParams() {
@@ -844,27 +868,29 @@ public void testMapReduceWithParams() {
Collections.emptyMap()
);
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(1));
- Object object = aggregationList.get(0);
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Number.class));
- assertThat(((Number) object).longValue(), equalTo(numDocs));
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(1));
+ Object object = aggregationList.get(0);
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Number.class));
+ assertThat(((Number) object).longValue(), equalTo(numDocs));
+ }
+ );
}
public void testInitMapCombineReduceWithParamsAndReduceParams() {
@@ -897,31 +923,33 @@ public void testInitMapCombineReduceWithParamsAndReduceParams() {
reduceParams
);
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(params)
- .initScript(initScript)
- .mapScript(mapScript)
- .combineScript(combineScript)
- .reduceScript(reduceScript)
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(1));
- Object object = aggregationList.get(0);
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Number.class));
- assertThat(((Number) object).longValue(), equalTo(numDocs * 12));
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(params)
+ .initScript(initScript)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript)
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(1));
+ Object object = aggregationList.get(0);
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Number.class));
+ assertThat(((Number) object).longValue(), equalTo(numDocs * 12));
+ }
+ );
}
public void testInitMapCombineReduceWithParamsStored() {
@@ -931,31 +959,33 @@ public void testInitMapCombineReduceWithParamsStored() {
Map<String, Object> params = new HashMap<>();
params.put("vars", varsMap);
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(
- scriptedMetric("scripted").params(params)
- .initScript(new Script(ScriptType.STORED, null, "initScript_stored", Collections.emptyMap()))
- .mapScript(new Script(ScriptType.STORED, null, "mapScript_stored", Collections.emptyMap()))
- .combineScript(new Script(ScriptType.STORED, null, "combineScript_stored", Collections.emptyMap()))
- .reduceScript(new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap()))
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
-
- Aggregation aggregation = response.getAggregations().get("scripted");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(1));
- Object object = aggregationList.get(0);
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Number.class));
- assertThat(((Number) object).longValue(), equalTo(numDocs * 3));
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .addAggregation(
+ scriptedMetric("scripted").params(params)
+ .initScript(new Script(ScriptType.STORED, null, "initScript_stored", Collections.emptyMap()))
+ .mapScript(new Script(ScriptType.STORED, null, "mapScript_stored", Collections.emptyMap()))
+ .combineScript(new Script(ScriptType.STORED, null, "combineScript_stored", Collections.emptyMap()))
+ .reduceScript(new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap()))
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+
+ Aggregation aggregation = response.getAggregations().get("scripted");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(1));
+ Object object = aggregationList.get(0);
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Number.class));
+ assertThat(((Number) object).longValue(), equalTo(numDocs * 3));
+ }
+ );
}
public void testInitMapCombineReduceWithParamsAsSubAgg() {
@@ -985,49 +1015,51 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() {
Collections.emptyMap()
);
- SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
- .setSize(1000)
- .addAggregation(
- histogram("histo").field("l_value")
- .interval(1)
- .subAggregation(
- scriptedMetric("scripted").params(params)
- .initScript(initScript)
- .mapScript(mapScript)
- .combineScript(combineScript)
- .reduceScript(reduceScript)
- )
- )
- .get();
- assertNoFailures(response);
- assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
- Aggregation aggregation = response.getAggregations().get("histo");
- assertThat(aggregation, notNullValue());
- assertThat(aggregation, instanceOf(Histogram.class));
- Histogram histoAgg = (Histogram) aggregation;
- assertThat(histoAgg.getName(), equalTo("histo"));
- List<? extends Bucket> buckets = histoAgg.getBuckets();
- assertThat(buckets, notNullValue());
- for (Bucket b : buckets) {
- assertThat(b, notNullValue());
- assertThat(b.getDocCount(), equalTo(1L));
- Aggregations subAggs = b.getAggregations();
- assertThat(subAggs, notNullValue());
- assertThat(subAggs.asList().size(), equalTo(1));
- Aggregation subAgg = subAggs.get("scripted");
- assertThat(subAgg, notNullValue());
- assertThat(subAgg, instanceOf(ScriptedMetric.class));
- ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) subAgg;
- assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
- assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
- assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
- List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
- assertThat(aggregationList.size(), equalTo(1));
- Object object = aggregationList.get(0);
- assertThat(object, notNullValue());
- assertThat(object, instanceOf(Number.class));
- assertThat(((Number) object).longValue(), equalTo(3L));
- }
+ assertNoFailuresAndResponse(
+ prepareSearch("idx").setQuery(matchAllQuery())
+ .setSize(1000)
+ .addAggregation(
+ histogram("histo").field("l_value")
+ .interval(1)
+ .subAggregation(
+ scriptedMetric("scripted").params(params)
+ .initScript(initScript)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript)
+ )
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(numDocs));
+ Aggregation aggregation = response.getAggregations().get("histo");
+ assertThat(aggregation, notNullValue());
+ assertThat(aggregation, instanceOf(Histogram.class));
+ Histogram histoAgg = (Histogram) aggregation;
+ assertThat(histoAgg.getName(), equalTo("histo"));
+ List<? extends Bucket> buckets = histoAgg.getBuckets();
+ assertThat(buckets, notNullValue());
+ for (Bucket b : buckets) {
+ assertThat(b, notNullValue());
+ assertThat(b.getDocCount(), equalTo(1L));
+ Aggregations subAggs = b.getAggregations();
+ assertThat(subAggs, notNullValue());
+ assertThat(subAggs.asList().size(), equalTo(1));
+ Aggregation subAgg = subAggs.get("scripted");
+ assertThat(subAgg, notNullValue());
+ assertThat(subAgg, instanceOf(ScriptedMetric.class));
+ ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) subAgg;
+ assertThat(scriptedMetricAggregation.getName(), equalTo("scripted"));
+ assertThat(scriptedMetricAggregation.aggregation(), notNullValue());
+ assertThat(scriptedMetricAggregation.aggregation(), instanceOf(ArrayList.class));
+ List<?> aggregationList = (List<?>) scriptedMetricAggregation.aggregation();
+ assertThat(aggregationList.size(), equalTo(1));
+ Object object = aggregationList.get(0);
+ assertThat(object, notNullValue());
+ assertThat(object, instanceOf(Number.class));
+ assertThat(((Number) object).longValue(), equalTo(3L));
+ }
+ }
+ );
}
public void testEmptyAggregation() throws Exception {
@@ -1057,36 +1089,38 @@ public void testEmptyAggregation() throws Exception {
Collections.emptyMap()
);
- SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
- .addAggregation(
- histogram("histo").field("value")
- .interval(1L)
- .minDocCount(0)
- .subAggregation(
- scriptedMetric("scripted").params(params)
- .initScript(initScript)
- .mapScript(mapScript)
- .combineScript(combineScript)
- .reduceScript(reduceScript)
- )
- )
- .get();
-
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
- Histogram histo = searchResponse.getAggregations().get("histo");
- assertThat(histo, notNullValue());
- Histogram.Bucket bucket = histo.getBuckets().get(1);
- assertThat(bucket, notNullValue());
-
- ScriptedMetric scriptedMetric = bucket.getAggregations().get("scripted");
- assertThat(scriptedMetric, notNullValue());
- assertThat(scriptedMetric.getName(), equalTo("scripted"));
- assertThat(scriptedMetric.aggregation(), notNullValue());
- assertThat(scriptedMetric.aggregation(), instanceOf(List.class));
- @SuppressWarnings("unchecked") // We'll just get a ClassCastException a couple lines down if we're wrong, its ok.
- List<Integer> aggregationResult = (List<Integer>) scriptedMetric.aggregation();
- assertThat(aggregationResult.size(), equalTo(1));
- assertThat(aggregationResult.get(0), equalTo(0));
+ assertNoFailuresAndResponse(
+ prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
+ .addAggregation(
+ histogram("histo").field("value")
+ .interval(1L)
+ .minDocCount(0)
+ .subAggregation(
+ scriptedMetric("scripted").params(params)
+ .initScript(initScript)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript)
+ )
+ ),
+ response -> {
+ assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+ Histogram histo = response.getAggregations().get("histo");
+ assertThat(histo, notNullValue());
+ Histogram.Bucket bucket = histo.getBuckets().get(1);
+ assertThat(bucket, notNullValue());
+
+ ScriptedMetric scriptedMetric = bucket.getAggregations().get("scripted");
+ assertThat(scriptedMetric, notNullValue());
+ assertThat(scriptedMetric.getName(), equalTo("scripted"));
+ assertThat(scriptedMetric.aggregation(), notNullValue());
+ assertThat(scriptedMetric.aggregation(), instanceOf(List.class));
+ @SuppressWarnings("unchecked") // We'll just get a ClassCastException a couple lines down if we're wrong, its ok.
+ List aggregationResult = (List) scriptedMetric.aggregation();
+ assertThat(aggregationResult.size(), equalTo(1));
+ assertThat(aggregationResult.get(0), equalTo(0));
+ }
+ );
}
/**
@@ -1129,12 +1163,15 @@ public void testScriptCaching() throws Exception {
);
// Test that a non-deterministic init script causes the result to not be cached
- SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(
- scriptedMetric("foo").initScript(ndInitScript).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)
- )
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(
+ scriptedMetric("foo").initScript(ndInitScript)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript)
+ )
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -1146,10 +1183,10 @@ public void testScriptCaching() throws Exception {
);
// Test that a non-deterministic map script causes the result to not be cached
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(scriptedMetric("foo").mapScript(ndMapScript).combineScript(combineScript).reduceScript(reduceScript))
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(scriptedMetric("foo").mapScript(ndMapScript).combineScript(combineScript).reduceScript(reduceScript))
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -1161,10 +1198,10 @@ public void testScriptCaching() throws Exception {
);
// Test that a non-deterministic combine script causes the result to not be cached
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(ndRandom).reduceScript(reduceScript))
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(ndRandom).reduceScript(reduceScript))
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -1176,10 +1213,10 @@ public void testScriptCaching() throws Exception {
);
// NOTE: random reduce scripts don't hit the query shard context (they are done on the coordinator) and so can be cached.
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(ndRandom))
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(ndRandom))
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -1191,10 +1228,10 @@ public void testScriptCaching() throws Exception {
);
// Test that all deterministic scripts cause the request to be cached
- r = prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript))
- .get();
- assertNoFailures(r);
+ assertNoFailures(
+ prepareSearch("cache_test_idx").setSize(0)
+ .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript))
+ );
assertThat(
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
index ab9ab37894f70..616e036f35233 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
@@ -630,8 +630,8 @@ public void testFetchFeatures() {
SearchHit hit = hits.getAt(0);
HighlightField highlightField = hit.getHighlightFields().get("text");
- assertThat(highlightField.getFragments().length, equalTo(1));
- assertThat(highlightField.getFragments()[0].string(), equalTo("some text to entertain"));
+ assertThat(highlightField.fragments().length, equalTo(1));
+ assertThat(highlightField.fragments()[0].string(), equalTo("some text to entertain"));
Explanation explanation = hit.getExplanation();
assertThat(explanation.toString(), containsString("text:text"));
@@ -903,8 +903,8 @@ public void testNestedFetchFeatures() {
assertThat(searchHit.getNestedIdentity().getOffset(), equalTo(0));
HighlightField highlightField = searchHit.getHighlightFields().get("comments.message");
- assertThat(highlightField.getFragments().length, equalTo(1));
- assertThat(highlightField.getFragments()[0].string(), equalTo("some comment "));
+ assertThat(highlightField.fragments().length, equalTo(1));
+ assertThat(highlightField.fragments()[0].string(), equalTo("some comment "));
// Can't explain nested hit with the main query, since both are in different scopes; also the nested doc may not
// even have matched with the main query.
@@ -961,8 +961,8 @@ public void testTopHitsInNested() throws Exception {
assertThat(extractValue("id", searchHits.getAt(j).getSourceAsMap()), equalTo(0));
HighlightField highlightField = searchHits.getAt(j).getHighlightFields().get("comments.message");
- assertThat(highlightField.getFragments().length, equalTo(1));
- assertThat(highlightField.getFragments()[0].string(), equalTo("some text "));
+ assertThat(highlightField.fragments().length, equalTo(1));
+ assertThat(highlightField.fragments()[0].string(), equalTo("some text "));
}
}
}
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java
index 54ad0cd7e0cff..0ccde7a62a09e 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java
@@ -41,7 +41,6 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(MockFSIndexStore.TestPlugin.class);
}
- @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99174")
public void testRandomDirectoryIOExceptions() throws IOException, InterruptedException, ExecutionException {
String mapping = Strings.toString(
XContentFactory.jsonBuilder()
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java
index 00c5342577231..dcfee8994b56b 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java
@@ -27,6 +27,7 @@
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
+import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
@@ -193,10 +194,8 @@ public void testSimpleNested() throws Exception {
innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
assertThat(innerHits.getTotalHits().value, equalTo(2L));
assertThat(innerHits.getHits().length, equalTo(1));
- assertThat(
- innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(),
- equalTo("fox eat quick")
- );
+ HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("comments.message");
+ assertThat(highlightField.fragments()[0].string(), equalTo("fox eat quick"));
assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(comments.message:fox in"));
assertThat(
innerHits.getAt(0).getFields().get("comments").getValue(),
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 79a28a053b3c2..6500b969ee273 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -3264,10 +3264,8 @@ public void testKeywordFieldHighlighting() throws IOException {
).get();
assertNoFailures(search);
assertThat(search.getHits().getTotalHits().value, equalTo(1L));
- assertThat(
- search.getHits().getAt(0).getHighlightFields().get("keyword_field").getFragments()[0].string(),
- equalTo("some text ")
- );
+ HighlightField highlightField = search.getHits().getAt(0).getHighlightFields().get("keyword_field");
+ assertThat(highlightField.fragments()[0].string(), equalTo("some text "));
}
public void testCopyToFields() throws Exception {
@@ -3294,8 +3292,8 @@ public void testCopyToFields() throws Exception {
assertHitCount(response, 1);
HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo_copy");
- assertThat(field.getFragments().length, equalTo(1));
- assertThat(field.getFragments()[0].string(), equalTo("how now brown cow"));
+ assertThat(field.fragments().length, equalTo(1));
+ assertThat(field.fragments()[0].string(), equalTo("how now brown cow"));
}
public void testACopyFieldWithNestedQuery() throws Exception {
@@ -3343,9 +3341,9 @@ public void testACopyFieldWithNestedQuery() throws Exception {
.get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo_text");
- assertThat(field.getFragments().length, equalTo(2));
- assertThat(field.getFragments()[0].string(), equalTo("brown "));
- assertThat(field.getFragments()[1].string(), equalTo("cow "));
+ assertThat(field.fragments().length, equalTo(2));
+ assertThat(field.fragments()[0].string(), equalTo("brown "));
+ assertThat(field.fragments()[1].string(), equalTo("cow "));
}
public void testFunctionScoreQueryHighlight() throws Exception {
@@ -3360,8 +3358,8 @@ public void testFunctionScoreQueryHighlight() throws Exception {
.get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text");
- assertThat(field.getFragments().length, equalTo(1));
- assertThat(field.getFragments()[0].string(), equalTo("brown "));
+ assertThat(field.fragments().length, equalTo(1));
+ assertThat(field.fragments()[0].string(), equalTo("brown "));
}
public void testFiltersFunctionScoreQueryHighlight() throws Exception {
@@ -3383,8 +3381,8 @@ public void testFiltersFunctionScoreQueryHighlight() throws Exception {
).highlighter(new HighlightBuilder().field(new Field("text"))).get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text");
- assertThat(field.getFragments().length, equalTo(1));
- assertThat(field.getFragments()[0].string(), equalTo("brown "));
+ assertThat(field.fragments().length, equalTo(1));
+ assertThat(field.fragments()[0].string(), equalTo("brown "));
}
public void testHighlightQueryRewriteDatesWithNow() throws Exception {
@@ -3465,33 +3463,33 @@ public void testWithNestedQuery() throws Exception {
).highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))).get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text");
- assertThat(field.getFragments().length, equalTo(2));
- assertThat(field.getFragments()[0].string(), equalTo("brown shoes"));
- assertThat(field.getFragments()[1].string(), equalTo("cow "));
+ assertThat(field.fragments().length, equalTo(2));
+ assertThat(field.fragments()[0].string(), equalTo("brown shoes"));
+ assertThat(field.fragments()[1].string(), equalTo("cow "));
searchResponse = prepareSearch().setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None))
.highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type)))
.get();
assertHitCount(searchResponse, 1);
field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text");
- assertThat(field.getFragments().length, equalTo(1));
- assertThat(field.getFragments()[0].string(), equalTo("brown shoes"));
+ assertThat(field.fragments().length, equalTo(1));
+ assertThat(field.fragments()[0].string(), equalTo("brown shoes"));
searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchPhraseQuery("foo.text", "brown shoes"), ScoreMode.None))
.highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type)))
.get();
assertHitCount(searchResponse, 1);
field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text");
- assertThat(field.getFragments().length, equalTo(1));
- assertThat(field.getFragments()[0].string(), equalTo("brown shoes "));
+ assertThat(field.fragments().length, equalTo(1));
+ assertThat(field.fragments()[0].string(), equalTo("brown shoes "));
searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchPhrasePrefixQuery("foo.text", "bro"), ScoreMode.None))
.highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type)))
.get();
assertHitCount(searchResponse, 1);
field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text");
- assertThat(field.getFragments().length, equalTo(1));
- assertThat(field.getFragments()[0].string(), equalTo("brown shoes"));
+ assertThat(field.fragments().length, equalTo(1));
+ assertThat(field.fragments()[0].string(), equalTo("brown shoes"));
}
// For unified and fvh highlighters we just check that the nested query is correctly extracted
@@ -3503,8 +3501,8 @@ public void testWithNestedQuery() throws Exception {
.get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text");
- assertThat(field.getFragments().length, equalTo(1));
- assertThat(field.getFragments()[0].string(), equalTo("brown "));
+ assertThat(field.fragments().length, equalTo(1));
+ assertThat(field.fragments()[0].string(), equalTo("brown "));
}
}
@@ -3526,8 +3524,8 @@ public void testWithNormalizer() throws Exception {
.get();
assertHitCount(searchResponse, 1);
HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("keyword");
- assertThat(field.getFragments().length, equalTo(1));
- assertThat(field.getFragments()[0].string(), equalTo("hello world "));
+ assertThat(field.fragments().length, equalTo(1));
+ assertThat(field.fragments()[0].string(), equalTo("hello world "));
}
}
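The getFragments()/fragments() rename across the highlighter tests above matches record-style accessor naming. As an illustration only (HighlightFieldSketch is hypothetical; the real HighlightField is a hand-written class), a record with these components exposes exactly the accessors the updated call sites use:

    import org.elasticsearch.common.text.Text;

    // fragments() and name() are the generated record accessors, matching the
    // field.fragments()[0].string() call sites in the updated tests
    public record HighlightFieldSketch(String name, Text[] fragments) {}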
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java
index 32a1d6724e0fd..1f86d4cb39ea4 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java
@@ -48,7 +48,6 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
}
/** Check that the reset method cleans up a feature */
- @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/97780")
public void testResetSystemIndices() throws Exception {
String systemIndex1 = ".test-system-idx-1";
String systemIndex2 = ".second-test-system-idx-1";
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java
index 4721b1a186a99..7eaa49b27007d 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java
@@ -55,6 +55,7 @@
import java.nio.file.Path;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
@@ -496,6 +497,11 @@ private void restoreSnapshot(SnapshotInfo snapshotInfo, Releasable releasePrevio
final String[] indicesToRestore = indicesToRestoreList.toArray(new String[0]);
final String[] indicesToClose = indicesToCloseList.toArray(new String[0]);
final String[] indicesToDelete = indicesToDeleteList.toArray(new String[0]);
+ final String indicesToRestoreDescription = (restoreSpecificIndices ? "" : "*=") + Arrays.toString(indicesToRestore);
+
+ if (restoreSpecificIndices == false) {
+ assertEquals(Set.copyOf(snapshotInfo.indices()), Set.of(indicesToRestore));
+ }
final ListenableFuture<CloseIndexResponse> closeIndicesStep = new ListenableFuture<>();
final ListenableFuture<AcknowledgedResponse> deleteIndicesStep = new ListenableFuture<>();
@@ -515,15 +521,17 @@ private void restoreSnapshot(SnapshotInfo snapshotInfo, Releasable releasePrevio
);
logger.info(
- "--> closing indices {} in preparation for restoring from [{}:{}]",
- indicesToRestoreList,
+ "--> closing indices {} in preparation for restoring {} from [{}:{}]",
+ indicesToClose,
+ indicesToRestoreDescription,
snapshotInfo.repository(),
snapshotInfo.snapshotId().getName()
);
indicesAdmin().prepareClose(indicesToClose).execute(mustSucceed(closeIndexResponse -> {
logger.info(
- "--> finished closing indices {} in preparation for restoring from [{}:{}]",
- indicesToRestoreList,
+ "--> finished closing indices {} in preparation for restoring {} from [{}:{}]",
+ indicesToClose,
+ indicesToRestoreDescription,
snapshotInfo.repository(),
snapshotInfo.snapshotId().getName()
);
@@ -538,15 +546,17 @@ private void restoreSnapshot(SnapshotInfo snapshotInfo, Releasable releasePrevio
if (indicesToDelete.length > 0) {
logger.info(
- "--> deleting indices {} in preparation for restoring from [{}:{}]",
- indicesToRestoreList,
+ "--> deleting indices {} in preparation for restoring {} from [{}:{}]",
+ indicesToDelete,
+ indicesToRestoreDescription,
snapshotInfo.repository(),
snapshotInfo.snapshotId().getName()
);
indicesAdmin().prepareDelete(indicesToDelete).execute(mustSucceed(deleteIndicesResponse -> {
logger.info(
- "--> finished deleting indices {} in preparation for restoring from [{}:{}]",
- indicesToRestoreList,
+ "--> finished deleting indices {} in preparation for restoring {} from [{}:{}]",
+ indicesToDelete,
+ indicesToRestoreDescription,
snapshotInfo.repository(),
snapshotInfo.snapshotId().getName()
);
@@ -569,9 +579,8 @@ private void restoreSnapshot(SnapshotInfo snapshotInfo, Releasable releasePrevio
}
logger.info(
- "--> restoring indices {}{} from [{}:{}]",
- restoreSpecificIndices ? "" : "*=",
- indicesToRestoreList,
+ "--> restoring indices {} from [{}:{}]",
+ indicesToRestoreDescription,
snapshotInfo.repository(),
snapshotInfo.snapshotId().getName()
);
@@ -579,7 +588,7 @@ private void restoreSnapshot(SnapshotInfo snapshotInfo, Releasable releasePrevio
restoreSnapshotRequestBuilder.execute(mustSucceed(restoreSnapshotResponse -> {
logger.info(
"--> triggered restore of indices {} from [{}:{}], waiting for green health",
- indicesToRestoreList,
+ indicesToRestoreDescription,
snapshotInfo.repository(),
snapshotInfo.snapshotId().getName()
);
@@ -590,7 +599,7 @@ private void restoreSnapshot(SnapshotInfo snapshotInfo, Releasable releasePrevio
logger.info(
"--> indices {} successfully restored from [{}:{}]",
- indicesToRestoreList,
+ indicesToRestoreDescription,
snapshotInfo.repository(),
snapshotInfo.snapshotId().getName()
);
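The new indicesToRestoreDescription string prefixes the index list with *= when the restore implicitly covers every index in the snapshot, so the logs distinguish the two cases. A self-contained illustration of the formatting (the index names are made up):

    import java.util.Arrays;

    public class RestoreDescriptionDemo {
        public static void main(String[] args) {
            String[] indicesToRestore = { "idx-001", "idx-002" };
            boolean restoreSpecificIndices = false; // restoring the whole snapshot
            String description = (restoreSpecificIndices ? "" : "*=") + Arrays.toString(indicesToRestore);
            System.out.println(description); // prints: *=[idx-001, idx-002]
        }
    }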
diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java
index 4bbfe994f7f6d..5c5133e478ee1 100644
--- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java
+++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java
@@ -1131,12 +1131,7 @@ private enum ElasticsearchExceptionHandle {
UNKNOWN_VERSION_ADDED
),
// 26 was BatchOperationException
- SNAPSHOT_CREATION_EXCEPTION(
- org.elasticsearch.snapshots.SnapshotCreationException.class,
- org.elasticsearch.snapshots.SnapshotCreationException::new,
- 27,
- UNKNOWN_VERSION_ADDED
- ),
+ // 27 was SnapshotCreationException
// 28 was DeleteFailedEngineException, deprecated in 6.0, removed in 7.0
DOCUMENT_MISSING_EXCEPTION(
org.elasticsearch.index.engine.DocumentMissingException.class,
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 0c7145730e447..b62a24fdc0b45 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -202,15 +202,17 @@ static TransportVersion def(int id) {
* If your git checkout has the expected minor-version-numbered branches and the expected release-version tags then you can find the
* transport versions known by a particular release ...
*
- * git show v8.9.1:server/src/main/java/org/elasticsearch/TransportVersions.java | grep def
+ * git show v8.11.0:server/src/main/java/org/elasticsearch/TransportVersions.java | grep '= def'
*
* ... or by a particular branch ...
*
- * git show 8.10:server/src/main/java/org/elasticsearch/TransportVersions.java | grep def
+ * git show 8.11:server/src/main/java/org/elasticsearch/TransportVersions.java | grep '= def'
*
* ... and you can see which versions were added in between two versions too ...
*
- * git diff 8.10..main -- server/src/main/java/org/elasticsearch/TransportVersions.java
+ * git diff v8.11.0..main -- server/src/main/java/org/elasticsearch/TransportVersions.java
+ *
+ * In branches 8.7-8.10 see server/src/main/java/org/elasticsearch/TransportVersion.java for the equivalent definitions.
*/
/**
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java
index a2324010876bf..f51c700c8c8c9 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java
@@ -8,6 +8,9 @@
package org.elasticsearch.action.search;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
@@ -31,6 +34,7 @@
public class TransportMultiSearchAction extends HandledTransportAction<MultiSearchRequest, MultiSearchResponse> {
+ private static final Logger logger = LogManager.getLogger(TransportMultiSearchAction.class);
private final int allocatedProcessors;
private final ThreadPool threadPool;
private final ClusterService clusterService;
@@ -155,6 +159,9 @@ public void onResponse(final SearchResponse searchResponse) {
@Override
public void onFailure(final Exception e) {
+ if (ExceptionsHelper.status(e).getStatus() >= 500 && ExceptionsHelper.isNodeOrShardUnavailableTypeException(e) == false) {
+ logger.warn("TransportMultiSearchAction failure", e);
+ }
handleResponse(request.responseSlot, new MultiSearchResponse.Item(null, e));
}
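The guard above warns only on failures that look like genuine server-side problems. Restated as a standalone predicate (a sketch; FailureLogPolicy is illustrative, while the two ExceptionsHelper calls are the ones used in the diff):

    import org.elasticsearch.ExceptionsHelper;

    public final class FailureLogPolicy {
        // Warn only on 5xx failures that are not routine node/shard-unavailable
        // conditions, which arise during normal cluster lifecycle events.
        public static boolean shouldWarn(Exception e) {
            return ExceptionsHelper.status(e).getStatus() >= 500
                && ExceptionsHelper.isNodeOrShardUnavailableTypeException(e) == false;
        }
    }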
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
index 5030bd875a0f6..a2d01e226b4ed 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
@@ -10,11 +10,13 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.OriginalIndices;
+import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
@@ -278,7 +280,24 @@ public long buildTookInMillis() {
@Override
protected void doExecute(Task task, SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
- executeRequest((SearchTask) task, searchRequest, listener, AsyncSearchActionProvider::new);
+ ActionListener<SearchResponse> loggingListener = listener.delegateFailureAndWrap((l, searchResponse) -> {
+ if (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) {
+ // Deduplicate failures by exception message and index
+ ShardOperationFailedException[] groupedFailures = ExceptionsHelper.groupBy(searchResponse.getShardFailures());
+ for (ShardOperationFailedException f : groupedFailures) {
+ boolean causeHas500Status = false;
+ if (f.getCause() != null) {
+ causeHas500Status = ExceptionsHelper.status(f.getCause()).getStatus() >= 500;
+ }
+ if ((f.status().getStatus() >= 500 || causeHas500Status)
+ && ExceptionsHelper.isNodeOrShardUnavailableTypeException(f.getCause()) == false) {
+ logger.warn("TransportSearchAction shard failure (partial results response)", f);
+ }
+ }
+ }
+ l.onResponse(searchResponse);
+ });
+ executeRequest((SearchTask) task, searchRequest, loggingListener, AsyncSearchActionProvider::new);
}
void executeRequest(
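The wrapping pattern used in doExecute above (and again in TransportSearchScrollAction below): delegateFailureAndWrap yields a listener that forwards failures to the delegate unchanged while the lambda inspects the response before passing it along. A generic sketch, with a Runnable standing in for the shard-failure logging:

    import org.elasticsearch.action.ActionListener;

    public final class ListenerWrapSketch {
        // Returns a listener that runs a side effect (e.g. logging grouped 5xx
        // shard failures) and then forwards the response to the delegate.
        public static <R> ActionListener<R> peeking(ActionListener<R> delegate, Runnable sideEffect) {
            return delegate.delegateFailureAndWrap((l, response) -> {
                sideEffect.run();
                l.onResponse(response);
            });
        }
    }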
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java
index d097b10b7162d..9874bcfb56c6a 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java
@@ -8,7 +8,11 @@
package org.elasticsearch.action.search;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.service.ClusterService;
@@ -22,7 +26,7 @@
import static org.elasticsearch.action.search.TransportSearchHelper.parseScrollId;
public class TransportSearchScrollAction extends HandledTransportAction<SearchScrollRequest, SearchResponse> {
-
+ private static final Logger logger = LogManager.getLogger(TransportSearchScrollAction.class);
private final ClusterService clusterService;
private final SearchTransportService searchTransportService;
@@ -40,6 +44,19 @@ public TransportSearchScrollAction(
@Override
protected void doExecute(Task task, SearchScrollRequest request, ActionListener<SearchResponse> listener) {
+ ActionListener<SearchResponse> loggingListener = listener.delegateFailureAndWrap((l, searchResponse) -> {
+ if (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) {
+ ShardOperationFailedException[] groupedFailures = ExceptionsHelper.groupBy(searchResponse.getShardFailures());
+ for (ShardOperationFailedException f : groupedFailures) {
+ Throwable cause = f.getCause() == null ? f : f.getCause();
+ if (ExceptionsHelper.status(cause).getStatus() >= 500
+ && ExceptionsHelper.isNodeOrShardUnavailableTypeException(cause) == false) {
+ logger.warn("TransportSearchScrollAction shard failure (partial results response)", f);
+ }
+ }
+ }
+ l.onResponse(searchResponse);
+ });
try {
ParsedScrollId scrollId = parseScrollId(request.scrollId());
Runnable action = switch (scrollId.getType()) {
@@ -50,7 +67,7 @@ protected void doExecute(Task task, SearchScrollRequest request, ActionListener<
request,
(SearchTask) task,
scrollId,
- listener
+ loggingListener
);
case QUERY_AND_FETCH_TYPE -> // TODO can we get rid of this?
new SearchScrollQueryAndFetchAsyncAction(
@@ -60,7 +77,7 @@ protected void doExecute(Task task, SearchScrollRequest request, ActionListener<
request,
(SearchTask) task,
scrollId,
- listener
+ loggingListener
);
default -> throw new IllegalArgumentException("Scroll id type [" + scrollId.getType() + "] unrecognized");
};
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java b/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java
new file mode 100644
index 0000000000000..05c0876669732
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.cluster.routing;
+
+import org.elasticsearch.cluster.ClusterInfo;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.snapshots.SnapshotShardSizeInfo;
+
+import java.util.Set;
+
+public class ExpectedShardSizeEstimator {
+
+ public static long getExpectedShardSize(ShardRouting shardRouting, long defaultSize, RoutingAllocation allocation) {
+ return getExpectedShardSize(
+ shardRouting,
+ defaultSize,
+ allocation.clusterInfo(),
+ allocation.snapshotShardSizeInfo(),
+ allocation.metadata(),
+ allocation.routingTable()
+ );
+ }
+
+ /**
+ * Returns the expected shard size for the given shard, or the default value provided if not enough information is available
+ * to estimate the shard's size.
+ */
+ public static long getExpectedShardSize(
+ ShardRouting shard,
+ long defaultValue,
+ ClusterInfo clusterInfo,
+ SnapshotShardSizeInfo snapshotShardSizeInfo,
+ Metadata metadata,
+ RoutingTable routingTable
+ ) {
+ final IndexMetadata indexMetadata = metadata.getIndexSafe(shard.index());
+ if (indexMetadata.getResizeSourceIndex() != null
+ && shard.active() == false
+ && shard.recoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) {
+ return getExpectedSizeOfResizedShard(shard, defaultValue, indexMetadata, clusterInfo, metadata, routingTable);
+ } else if (shard.unassigned() && shard.recoverySource().getType() == RecoverySource.Type.SNAPSHOT) {
+ return snapshotShardSizeInfo.getShardSize(shard, defaultValue);
+ } else {
+ return clusterInfo.getShardSize(shard, defaultValue);
+ }
+ }
+
+ private static long getExpectedSizeOfResizedShard(
+ ShardRouting shard,
+ long defaultValue,
+ IndexMetadata indexMetadata,
+ ClusterInfo clusterInfo,
+ Metadata metadata,
+ RoutingTable routingTable
+ ) {
+ // in the shrink index case we sum up the source index shards since we basically make a copy of the shard in the worst case
+ long targetShardSize = 0;
+ final Index mergeSourceIndex = indexMetadata.getResizeSourceIndex();
+ final IndexMetadata sourceIndexMetadata = metadata.index(mergeSourceIndex);
+ if (sourceIndexMetadata != null) {
+ final Set<ShardId> shardIds = IndexMetadata.selectRecoverFromShards(
+ shard.id(),
+ sourceIndexMetadata,
+ indexMetadata.getNumberOfShards()
+ );
+ final IndexRoutingTable indexRoutingTable = routingTable.index(mergeSourceIndex.getName());
+ for (int i = 0; i < indexRoutingTable.size(); i++) {
+ IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(i);
+ if (shardIds.contains(shardRoutingTable.shardId())) {
+ targetShardSize += clusterInfo.getShardSize(shardRoutingTable.primaryShard(), 0);
+ }
+ }
+ }
+ return targetShardSize == 0 ? defaultValue : targetShardSize;
+ }
+}
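Call sites elsewhere in this diff (BalancedShardsAllocator, DesiredBalanceReconciler) statically import the extracted method rather than going through DiskThresholdDecider. A usage sketch with the sentinel default those call sites pass:

    import org.elasticsearch.cluster.routing.ShardRouting;
    import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;

    import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize;

    public final class ShardSizeLookupSketch {
        // Resolves an expected size for the shard being allocated, falling back to
        // the UNAVAILABLE_EXPECTED_SHARD_SIZE sentinel when no estimate is known.
        public static long sizeOrSentinel(ShardRouting shard, RoutingAllocation allocation) {
            return getExpectedShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, allocation);
        }
    }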
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java
index 8d336d2147e11..438c81b5fbb98 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java
@@ -31,7 +31,6 @@
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type;
-import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
@@ -57,6 +56,7 @@
import java.util.stream.StreamSupport;
import static org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.Type.REPLACE;
+import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize;
import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
import static org.elasticsearch.common.settings.ClusterSettings.createBuiltInClusterSettings;
@@ -1037,11 +1037,7 @@ private void allocateUnassigned() {
logger.trace("Assigned shard [{}] to [{}]", shard, minNode.getNodeId());
}
- final long shardSize = DiskThresholdDecider.getExpectedShardSize(
- shard,
- ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE,
- allocation
- );
+ final long shardSize = getExpectedShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, allocation);
shard = routingNodes.initializeShard(shard, minNode.getNodeId(), null, shardSize, allocation.changes());
minNode.addShard(shard);
if (shard.primary() == false) {
@@ -1064,11 +1060,7 @@ private void allocateUnassigned() {
if (minNode != null) {
// throttle decision scenario
assert allocationDecision.getAllocationStatus() == AllocationStatus.DECIDERS_THROTTLED;
- final long shardSize = DiskThresholdDecider.getExpectedShardSize(
- shard,
- ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE,
- allocation
- );
+ final long shardSize = getExpectedShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, allocation);
minNode.addShard(shard.initialize(minNode.getNodeId(), null, shardSize));
} else {
if (logger.isTraceEnabled()) {
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java
index 048ade3ef86c5..6fac97e34d022 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java
@@ -21,7 +21,6 @@
import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
-import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
@@ -40,6 +39,7 @@
import java.util.stream.IntStream;
import static org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.Type.REPLACE;
+import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize;
/**
* Given the current allocation of shards and the desired balance, performs the next (legal) shard movements towards the goal.
@@ -271,14 +271,7 @@ private void allocateUnassigned() {
switch (decision.type()) {
case YES -> {
logger.debug("Assigning shard [{}] to {} [{}]", shard, nodeIdsIterator.source, nodeId);
- final long shardSize = DiskThresholdDecider.getExpectedShardSize(
- shard,
- ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE,
- allocation.clusterInfo(),
- allocation.snapshotShardSizeInfo(),
- allocation.metadata(),
- allocation.routingTable()
- );
+ long shardSize = getExpectedShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, allocation);
routingNodes.initializeShard(shard, nodeId, null, shardSize, allocation.changes());
allocationOrdering.recordAllocation(nodeId);
if (shard.primary() == false) {
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java
index ee95074b8a730..11d2317f5bcea 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java
@@ -264,7 +264,7 @@ public void resetDesiredBalance() {
public DesiredBalanceStats getStats() {
return new DesiredBalanceStats(
- currentDesiredBalance.lastConvergedIndex(),
+ Math.max(currentDesiredBalance.lastConvergedIndex(), 0L),
desiredBalanceComputation.isActive(),
computationsSubmitted.count(),
computationsExecuted.count(),
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStats.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStats.java
index c017d77362427..b8a1d3e1b899d 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStats.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceStats.java
@@ -33,6 +33,13 @@ public record DesiredBalanceStats(
private static final TransportVersion COMPUTED_SHARD_MOVEMENTS_VERSION = TransportVersions.V_8_8_0;
+ public DesiredBalanceStats {
+ if (lastConvergedIndex < 0) {
+ assert false : lastConvergedIndex;
+ throw new IllegalStateException("lastConvergedIndex must be nonnegative, but got [" + lastConvergedIndex + ']');
+ }
+ }
+
public static DesiredBalanceStats readFrom(StreamInput in) throws IOException {
return new DesiredBalanceStats(
in.readVLong(),
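Taken together, the two hunks above establish a producer/consumer contract: `getStats` clamps the `-1` "not yet converged" sentinel to zero, and the record's compact constructor rejects anything negative. A minimal sketch of that validate-in-compact-constructor pattern, using hypothetical names:

```java
// Minimal sketch: validate invariants in a record's compact constructor
// (hypothetical names, mirroring the assert-then-throw style above).
public record Stats(long lastConvergedIndex) {
    public Stats {
        if (lastConvergedIndex < 0) {
            assert false : lastConvergedIndex; // trips in tests run with -ea
            throw new IllegalStateException("lastConvergedIndex must be nonnegative, but got [" + lastConvergedIndex + ']');
        }
    }

    public static void main(String[] args) {
        // Producers clamp the -1 "not yet converged" sentinel before constructing.
        Stats stats = new Stats(Math.max(-1L, 0L));
        System.out.println(stats.lastConvergedIndex()); // 0
    }
}
```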
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
index d916aa7638786..fe001480e5f46 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
@@ -15,8 +15,6 @@
import org.elasticsearch.cluster.DiskUsage;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
-import org.elasticsearch.cluster.routing.IndexRoutingTable;
-import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingTable;
@@ -29,12 +27,10 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.snapshots.SnapshotShardSizeInfo;
import java.util.Map;
-import java.util.Set;
+
+import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize;
/**
* The {@link DiskThresholdDecider} checks that the node a shard is potentially
@@ -541,61 +537,6 @@ private Decision earlyTerminate(Map<String, DiskUsage> usages) {
return null;
}
- public static long getExpectedShardSize(ShardRouting shardRouting, long defaultSize, RoutingAllocation allocation) {
- return DiskThresholdDecider.getExpectedShardSize(
- shardRouting,
- defaultSize,
- allocation.clusterInfo(),
- allocation.snapshotShardSizeInfo(),
- allocation.metadata(),
- allocation.routingTable()
- );
- }
-
- /**
- * Returns the expected shard size for the given shard or the default value provided if not enough information are available
- * to estimate the shards size.
- */
- public static long getExpectedShardSize(
- ShardRouting shard,
- long defaultValue,
- ClusterInfo clusterInfo,
- SnapshotShardSizeInfo snapshotShardSizeInfo,
- Metadata metadata,
- RoutingTable routingTable
- ) {
- final IndexMetadata indexMetadata = metadata.getIndexSafe(shard.index());
- if (indexMetadata.getResizeSourceIndex() != null
- && shard.active() == false
- && shard.recoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) {
- // in the shrink index case we sum up the source index shards since we basically make a copy of the shard in
- // the worst case
- long targetShardSize = 0;
- final Index mergeSourceIndex = indexMetadata.getResizeSourceIndex();
- final IndexMetadata sourceIndexMeta = metadata.index(mergeSourceIndex);
- if (sourceIndexMeta != null) {
- final Set<ShardId> shardIds = IndexMetadata.selectRecoverFromShards(
- shard.id(),
- sourceIndexMeta,
- indexMetadata.getNumberOfShards()
- );
- final IndexRoutingTable indexRoutingTable = routingTable.index(mergeSourceIndex.getName());
- for (int i = 0; i < indexRoutingTable.size(); i++) {
- IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(i);
- if (shardIds.contains(shardRoutingTable.shardId())) {
- targetShardSize += clusterInfo.getShardSize(shardRoutingTable.primaryShard(), 0);
- }
- }
- }
- return targetShardSize == 0 ? defaultValue : targetShardSize;
- } else {
- if (shard.unassigned() && shard.recoverySource().getType() == RecoverySource.Type.SNAPSHOT) {
- return snapshotShardSizeInfo.getShardSize(shard, defaultValue);
- }
- return clusterInfo.getShardSize(shard, defaultValue);
- }
- }
-
record DiskUsageWithRelocations(DiskUsage diskUsage, long relocatingShardSize) {
double getFreeDiskAsPercentage() {
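The removed methods moved into `ExpectedShardSizeEstimator`, keeping the shape where a convenience overload unpacks the `RoutingAllocation` aggregate before delegating. A hedged sketch of that delegation shape, with stand-in types and deliberately simplified logic (the real estimator also handles the resize/shrink and snapshot-recovery cases shown above):

```java
// Hedged sketch of the estimator extraction: a convenience overload unpacks an
// aggregate argument, then delegates to the fully-parameterized variant.
// Stand-in types and simplified logic, not the real ES estimator.
final class SizeEstimator {
    interface ClusterInfo { long shardSize(String shardId, long fallback); }
    interface SnapshotSizes { long shardSize(String shardId, long fallback); }
    record Allocation(ClusterInfo clusterInfo, SnapshotSizes snapshotSizes) {}

    // Callers pass the aggregate...
    static long expectedShardSize(String shardId, long fallback, Allocation allocation) {
        return expectedShardSize(shardId, fallback, allocation.clusterInfo(), allocation.snapshotSizes());
    }

    // ...which delegates to the variant that does the work.
    static long expectedShardSize(String shardId, long fallback, ClusterInfo info, SnapshotSizes snapshots) {
        long size = info.shardSize(shardId, -1L);
        return size >= 0 ? size : snapshots.shardSize(shardId, fallback);
    }
}
```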
diff --git a/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java b/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java
index 4d50764aa0cc1..ba8a8458b08cc 100644
--- a/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java
+++ b/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java
@@ -41,6 +41,7 @@
*/
public class HealthPeriodicLogger implements ClusterStateListener, Closeable, SchedulerEngine.Listener {
public static final String HEALTH_FIELD_PREFIX = "elasticsearch.health";
+ public static final String MESSAGE_FIELD = "message";
public static final Setting<TimeValue> POLL_INTERVAL_SETTING = Setting.timeSetting(
"health.periodic_logger.poll_interval",
@@ -193,6 +194,7 @@ static Map<String, Object> convertToLoggedFields(List<HealthIndicatorResult> ind
// overall status
final HealthStatus status = HealthStatus.merge(indicatorResults.stream().map(HealthIndicatorResult::status));
result.put(String.format(Locale.ROOT, "%s.overall.status", HEALTH_FIELD_PREFIX), status.xContentValue());
+ result.put(MESSAGE_FIELD, String.format(Locale.ROOT, "health=%s", status.xContentValue()));
// top-level status for each indicator
indicatorResults.forEach((indicatorResult) -> {
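Adding a plain `message` field next to the `elasticsearch.health.*` keys gives log pipelines a human-readable summary without losing the structured fields. A small sketch of the field-map construction, with a made-up input (a status string instead of the real `HealthIndicatorResult` list):

```java
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;

// Sketch: emit structured keys plus a readable "message" field in one map.
// The overall status is passed in as a plain string here for brevity.
final class HealthLogFields {
    static final String PREFIX = "elasticsearch.health";
    static final String MESSAGE_FIELD = "message";

    static Map<String, Object> convert(String overallStatus) {
        Map<String, Object> result = new LinkedHashMap<>();
        result.put(String.format(Locale.ROOT, "%s.overall.status", PREFIX), overallStatus);
        result.put(MESSAGE_FIELD, String.format(Locale.ROOT, "health=%s", overallStatus));
        return result;
    }

    public static void main(String[] args) {
        // {elasticsearch.health.overall.status=yellow, message=health=yellow}
        System.out.println(convert("yellow"));
    }
}
```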
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 141a06eff0ec6..d217f6b844fe8 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -759,13 +759,11 @@ private static String loadHistoryUUID(Map commitData) {
private ExternalReaderManager createReaderManager(RefreshWarmerListener externalRefreshListener) throws EngineException {
boolean success = false;
+ ElasticsearchDirectoryReader directoryReader = null;
ElasticsearchReaderManager internalReaderManager = null;
try {
try {
- final ElasticsearchDirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(
- DirectoryReader.open(indexWriter),
- shardId
- );
+ directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId);
lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo();
internalReaderManager = createInternalReaderManager(directoryReader);
ExternalReaderManager externalReaderManager = new ExternalReaderManager(internalReaderManager, externalRefreshListener);
@@ -782,7 +780,9 @@ private ExternalReaderManager createReaderManager(RefreshWarmerListener external
}
} finally {
if (success == false) { // release everything we created on a failure
- IOUtils.closeWhileHandlingException(internalReaderManager, indexWriter);
+ // make sure that we close the directory reader even if the internal reader manager has failed to initialize
+ var reader = internalReaderManager == null ? directoryReader : internalReaderManager;
+ IOUtils.closeWhileHandlingException(reader, indexWriter);
}
}
}
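Hoisting `directoryReader` out of the inner `try` means the failure path can close it even when `createInternalReaderManager` throws before the manager takes ownership. A minimal sketch of the success-flag cleanup idiom with plain `Closeable`s, not the ES types:

```java
import java.io.Closeable;
import java.io.IOException;

// Sketch of the success-flag cleanup idiom: on failure, close whichever
// resource got furthest through initialization. Plain Closeables, not ES types.
final class GuardedInit {
    static Closeable open() throws IOException {
        boolean success = false;
        Closeable reader = null;
        Closeable manager = null;
        try {
            reader = () -> System.out.println("reader closed");
            manager = wrap(reader); // may throw before taking ownership of reader
            success = true;
            return manager;
        } finally {
            if (success == false) {
                // manager owns reader once constructed; otherwise close reader directly
                Closeable leaked = manager == null ? reader : manager;
                if (leaked != null) {
                    try {
                        leaked.close();
                    } catch (IOException e) {
                        // suppressed, mirroring IOUtils.closeWhileHandlingException
                    }
                }
            }
        }
    }

    static Closeable wrap(Closeable inner) {
        return inner::close; // stand-in for a manager that closes its reader
    }
}
```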
diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java
index addc6f33c9eba..a18ea0f90ec08 100644
--- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java
+++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java
@@ -112,7 +112,7 @@ protected BinaryDocValues getBinaryDocValues(LeafReaderContext context, String f
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
- selectedValues = sortMode.select(values, missingBytes, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren);
+ selectedValues = sortMode.select(values, missingBytes, rootDocs, innerDocs, maxChildren);
}
return selectedValues;
}
diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java
index 827e1618adde2..e8d4363ca9932 100644
--- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java
+++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java
@@ -90,7 +90,7 @@ private NumericDocValues getNumericDocValues(LeafReaderContext context, long mis
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
- return sortMode.select(values, missingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren);
+ return sortMode.select(values, missingValue, rootDocs, innerDocs, maxChildren);
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
index 996c6243064e9..17af6259ca27c 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
@@ -613,8 +613,12 @@ private static void postProcessDynamicArrayMapping(DocumentParserContext context
|| context.isCopyToField(fullFieldName)
|| mappers.size() < MIN_DIMS_FOR_DYNAMIC_FLOAT_MAPPING
|| mappers.size() > MAX_DIMS_COUNT
+ // Anything that is NOT a number or anything that IS a number but not mapped to `float` should NOT be mapped to dense_vector
|| mappers.stream()
- .allMatch(m -> m instanceof NumberFieldMapper.Builder nb && nb.type != NumberFieldMapper.NumberType.FLOAT)) {
+ .anyMatch(
+ m -> m instanceof NumberFieldMapper.Builder == false
+ || ((NumberFieldMapper.Builder) m).type != NumberFieldMapper.NumberType.FLOAT
+ )) {
return;
}
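The old `allMatch` condition only skipped `dense_vector` mapping when every element was a non-float number, so a list mixing floats with non-numeric values slipped through. The new `anyMatch` bails out as soon as any element is not a float-number builder. A toy demonstration with invented mapper types:

```java
import java.util.List;

// Toy demonstration of the predicate fix with invented mapper types:
// skip dense_vector when ANY element is not a float-number builder.
final class PredicateFix {
    sealed interface Mapper permits NumberMapper, KeywordMapper {}
    record NumberMapper(String type) implements Mapper {}
    record KeywordMapper() implements Mapper {}

    // Old condition: every element is a number builder of a non-float type.
    static boolean skipOld(List<Mapper> mappers) {
        return mappers.stream().allMatch(m -> m instanceof NumberMapper nm && nm.type().equals("float") == false);
    }

    // New condition: some element is not a number builder, or not a float one.
    static boolean skipNew(List<Mapper> mappers) {
        return mappers.stream()
            .anyMatch(m -> m instanceof NumberMapper == false || ((NumberMapper) m).type().equals("float") == false);
    }

    public static void main(String[] args) {
        List<Mapper> mixed = List.of(new NumberMapper("float"), new KeywordMapper());
        System.out.println(skipOld(mixed)); // false: the mixed list was not skipped
        System.out.println(skipNew(mixed)); // true: mixed content now disables dense_vector
    }
}
```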
diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index f4812f280f917..2491d13784483 100644
--- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -285,6 +285,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
private final LongSupplier relativeTimeInNanosSupplier;
private volatile long startedRelativeTimeInNanos;
private volatile long indexingTimeBeforeShardStartedInNanos;
+ private final SubscribableListener<Void> waitForEngineOrClosedShardListeners = new SubscribableListener<>();
// the translog keeps track of the GCP, but unpromotable shards have no translog so we need to track the GCP here instead
private volatile long globalCheckPointIfUnpromotable;
@@ -1658,6 +1659,7 @@ public void close(String reason, boolean flushEngine) throws IOException {
synchronized (mutex) {
changeState(IndexShardState.CLOSED, reason);
}
+ checkAndCallWaitForEngineOrClosedShardListeners();
} finally {
final Engine engine = this.currentEngineReference.getAndSet(null);
try {
@@ -2016,6 +2018,7 @@ private void innerOpenEngineAndTranslog(LongSupplier globalCheckpointSupplier) t
onSettingsChanged();
assert assertSequenceNumbersInCommit();
recoveryState.validateCurrentStage(RecoveryState.Stage.TRANSLOG);
+ checkAndCallWaitForEngineOrClosedShardListeners();
}
private boolean assertSequenceNumbersInCommit() throws IOException {
@@ -4181,10 +4184,28 @@ public void waitForSegmentGeneration(long segmentGeneration, ActionListener<Long> listener) {
+ public void waitForEngineOrClosedShard(ActionListener<Void> listener) {
+ waitForEngineOrClosedShardListeners.addListener(listener);
+ }
+
/**
* Registers a listener for an event when the shard advances to the provided primary term and segment generation
*/
public void waitForPrimaryTermAndGeneration(long primaryTerm, long segmentGeneration, ActionListener<Long> listener) {
- getEngine().addPrimaryTermAndGenerationListener(primaryTerm, segmentGeneration, listener);
+ waitForEngineOrClosedShard(
+ listener.delegateFailureAndWrap(
+ (l, ignored) -> getEngine().addPrimaryTermAndGenerationListener(primaryTerm, segmentGeneration, l)
+ )
+ );
}
+
}
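The new `SubscribableListener<Void>` is completed from both the engine-open path and `close`, so callers of `waitForPrimaryTermAndGeneration` never touch a not-yet-existing engine. A rough `CompletableFuture` analogue of that one-shot broadcast, with stand-in names rather than the ES API:

```java
import java.util.concurrent.CompletableFuture;

// Rough analogue of the one-shot "engine opened or shard closed" broadcast,
// using CompletableFuture instead of ES's SubscribableListener. Stand-in names.
final class EngineGate {
    private final CompletableFuture<Void> engineOrClosed = new CompletableFuture<>();

    // Called from both the engine-open path and the close path; first call wins.
    void signalEngineOrClosed() {
        engineOrClosed.complete(null);
    }

    // Listeners registered after completion fire immediately.
    void waitForEngineOrClosed(Runnable listener) {
        engineOrClosed.thenRun(listener);
    }

    public static void main(String[] args) {
        EngineGate gate = new EngineGate();
        gate.waitForEngineOrClosed(() -> System.out.println("safe to touch the engine"));
        gate.signalEngineOrClosed(); // fires the listener exactly once
        gate.signalEngineOrClosed(); // no-op: already completed
    }
}
```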
diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java
index 4c1df376ebf63..a4f641fd6f071 100644
--- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java
@@ -468,11 +468,6 @@ public boolean sourceRequested() {
return fetchSourceContext != null && fetchSourceContext.fetchSource();
}
- @Override
- public boolean hasFetchSourceContext() {
- return fetchSourceContext != null;
- }
-
@Override
public FetchSourceContext fetchSourceContext() {
return this.fetchSourceContext;
diff --git a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java
index 9137d5c97248d..2b5d9cb17b4f4 100644
--- a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java
+++ b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java
@@ -543,7 +543,7 @@ public int docID() {
}
@Override
- public long longValue() throws IOException {
+ public long longValue() {
return value;
}
};
@@ -571,7 +571,6 @@ public NumericDocValues select(
final long missingValue,
final BitSet parentDocs,
final DocIdSetIterator childDocs,
- int maxDoc,
int maxChildren
) throws IOException {
if (parentDocs == null || childDocs == null) {
@@ -654,7 +653,7 @@ public boolean advanceExact(int target) throws IOException {
}
@Override
- public double doubleValue() throws IOException {
+ public double doubleValue() {
return this.value;
}
};
@@ -804,7 +803,6 @@ public BinaryDocValues select(
final BytesRef missingValue,
final BitSet parentDocs,
final DocIdSetIterator childDocs,
- int maxDoc,
int maxChildren
) throws IOException {
if (parentDocs == null || childDocs == null) {
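Both `select` overloads drop the `maxDoc` parameter the implementations never used; their actual job is to reduce each parent document's child values to one representative. A toy sketch of that reduction over plain arrays, ignoring Lucene's doc-ID iterator machinery:

```java
import java.util.Arrays;

// Toy sketch of MultiValueMode-style selection: reduce each parent's child
// values to one representative (MAX here) with a missing-value default.
// Plain arrays stand in for Lucene's parent/child doc-ID iterators.
final class SelectSketch {
    static long[] selectMaxPerParent(long[][] childValuesPerParent, long missingValue) {
        long[] selected = new long[childValuesPerParent.length];
        for (int parent = 0; parent < childValuesPerParent.length; parent++) {
            long[] children = childValuesPerParent[parent];
            selected[parent] = children.length == 0 ? missingValue : Arrays.stream(children).max().getAsLong();
        }
        return selected;
    }

    public static void main(String[] args) {
        long[][] docs = { { 3, 9, 1 }, {}, { 7 } };
        System.out.println(Arrays.toString(selectMaxPerParent(docs, -1))); // [9, -1, 7]
    }
}
```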
diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java
index 9fa99bb4a773f..6c04f6feddc96 100644
--- a/server/src/main/java/org/elasticsearch/search/SearchHit.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java
@@ -1001,7 +1001,7 @@ private static Map<String, HighlightField> parseHighlightFields(XContentParser p
Map<String, HighlightField> highlightFields = new HashMap<>();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
HighlightField highlightField = HighlightField.fromXContent(parser);
- highlightFields.put(highlightField.getName(), highlightField);
+ highlightFields.put(highlightField.name(), highlightField);
}
return highlightFields;
}
diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java
index 44a8f641fae91..6ee02fa9425c0 100644
--- a/server/src/main/java/org/elasticsearch/search/SearchService.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchService.java
@@ -739,7 +739,7 @@ public void executeQueryPhase(
SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(searchContext)
) {
searchContext.searcher().setAggregatedDfs(readerContext.getAggregatedDfs(null));
- processScroll(request, readerContext, searchContext);
+ processScroll(request, searchContext);
QueryPhase.execute(searchContext);
executor.success();
readerContext.setRescoreDocIds(searchContext.rescoreDocIds());
@@ -830,7 +830,7 @@ public void executeFetchPhase(
) {
searchContext.assignRescoreDocIds(readerContext.getRescoreDocIds(null));
searchContext.searcher().setAggregatedDfs(readerContext.getAggregatedDfs(null));
- processScroll(request, readerContext, searchContext);
+ processScroll(request, searchContext);
searchContext.addQueryResult();
QueryPhase.execute(searchContext);
final long afterQueryTime = executor.success();
@@ -1506,7 +1506,7 @@ private static void shortcutDocIdsToLoad(SearchContext context) {
context.docIdsToLoad(docIdsToLoad);
}
- private static void processScroll(InternalScrollSearchRequest request, ReaderContext reader, SearchContext context) {
+ private static void processScroll(InternalScrollSearchRequest request, SearchContext context) {
// process scroll
context.from(context.from() + context.size());
context.scrollContext().scroll = request.scroll();
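`processScroll` loses the unused `ReaderContext` parameter; what remains is the window arithmetic, each scroll round advancing `from` by the page size. Sketched with hypothetical field names:

```java
// Sketch with hypothetical field names: each scroll round advances the
// window start by the page size, which is all processScroll needs to do.
final class ScrollWindow {
    private int from = 0;
    private final int size;

    ScrollWindow(int size) {
        this.size = size;
    }

    void nextPage() {
        from = from + size; // context.from(context.from() + context.size())
    }

    int from() {
        return from;
    }
}
```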
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java
index d89d9b07e57bc..516c9d91a7b65 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java
@@ -179,7 +179,7 @@ private class MergeBucketsPhase extends CollectionPhase {
* Sorts the indices of values by their underlying value.
* This will produce a merge map whose application will sort values.
*/
- private class ClusterSorter extends InPlaceMergeSorter {
+ private static class ClusterSorter extends InPlaceMergeSorter {
final DoubleArray values;
final long[] indexes;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java
index d80eb8a58040e..23c26794f6bb5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java
@@ -77,9 +77,6 @@ public static TDigestState create(double compression, TDigestExecutionHint execu
return switch (executionHint) {
case HIGH_ACCURACY -> createOptimizedForAccuracy(compression);
case DEFAULT -> create(compression);
- default -> throw new IllegalArgumentException(
- "Unexpected TDigestExecutionHint in TDigestState initialization: " + executionHint
- );
};
}
@@ -99,7 +96,6 @@ protected TDigestState(Type type, double compression) {
case AVL_TREE -> TDigest.createAvlTreeDigest(compression);
case SORTING -> TDigest.createSortingDigest();
case MERGING -> TDigest.createMergingDigest(compression);
- default -> throw new IllegalArgumentException("Unexpected TDigestState type: " + type);
};
this.type = type;
this.compression = compression;
diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java
index 049e06b0d98c7..f787e30644658 100644
--- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java
@@ -204,6 +204,6 @@ public CollapseContext build(SearchExecutionContext searchExecutionContext) {
);
}
- return new CollapseContext(field, fieldType, innerHits);
+ return new CollapseContext(field, fieldType);
}
}
diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java
index 62d7f7cc74cd4..080caaeed0fde 100644
--- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java
+++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java
@@ -11,23 +11,18 @@
import org.apache.lucene.search.Sort;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.CollapseType;
-import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.lucene.grouping.SinglePassGroupingCollector;
-import java.util.List;
-
/**
* Context used for field collapsing
*/
public class CollapseContext {
private final String fieldName;
private final MappedFieldType fieldType;
- private final List<InnerHitBuilder> innerHits;
- public CollapseContext(String fieldName, MappedFieldType fieldType, List<InnerHitBuilder> innerHits) {
+ public CollapseContext(String fieldName, MappedFieldType fieldType) {
this.fieldName = fieldName;
this.fieldType = fieldType;
- this.innerHits = innerHits;
}
/**
@@ -42,11 +37,6 @@ public MappedFieldType getFieldType() {
return fieldType;
}
- /** The inner hit options to expand the collapsed results **/
- public List<InnerHitBuilder> getInnerHit() {
- return innerHits;
- }
-
public SinglePassGroupingCollector<?> createTopDocs(Sort sort, int topN, FieldDoc after) {
if (fieldType.collapseType() == CollapseType.KEYWORD) {
return SinglePassGroupingCollector.createKeyword(fieldName, fieldType, sort, topN, after);
diff --git a/server/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java b/server/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java
index 1bd70b5c14817..0ce6824ec432b 100644
--- a/server/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java
+++ b/server/src/main/java/org/elasticsearch/search/dfs/AggregatedDfs.java
@@ -21,9 +21,9 @@
public class AggregatedDfs implements Writeable {
- private Map<Term, TermStatistics> termStatistics;
- private Map<String, CollectionStatistics> fieldStatistics;
- private long maxDoc;
+ private final Map<Term, TermStatistics> termStatistics;
+ private final Map<String, CollectionStatistics> fieldStatistics;
+ private final long maxDoc;
public AggregatedDfs(StreamInput in) throws IOException {
int size = in.readVInt();
@@ -51,10 +51,6 @@ public Map<String, CollectionStatistics> fieldStatistics() {
return fieldStatistics;
}
- public long maxDoc() {
- return maxDoc;
- }
-
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeMap(termStatistics, (o, k) -> {
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
index 5a04404c2e38a..91ac7356a9670 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
@@ -91,7 +91,7 @@ private static class PreloadedSourceProvider implements SourceProvider {
Source source;
@Override
- public Source getSource(LeafReaderContext ctx, int doc) throws IOException {
+ public Source getSource(LeafReaderContext ctx, int doc) {
return source;
}
}
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsContext.java b/server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsContext.java
index ae0e52ab69091..c3a91fde896bd 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsContext.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsContext.java
@@ -115,7 +115,7 @@ public boolean equals(Object o) {
StoredFieldsContext that = (StoredFieldsContext) o;
if (fetchFields != that.fetchFields) return false;
- return fieldNames != null ? fieldNames.equals(that.fieldNames) : that.fieldNames == null;
+ return Objects.equals(fieldNames, that.fieldNames);
}
@@ -164,7 +164,7 @@ public static StoredFieldsContext fromXContent(String fieldName, XContentParser
return fromList(Collections.singletonList(parser.text()));
} else if (token == XContentParser.Token.START_ARRAY) {
ArrayList<String> list = new ArrayList<>();
- while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
list.add(parser.text());
}
return fromList(list);
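`Objects.equals` is the drop-in replacement for the hand-rolled null-safe ternary; the two forms agree on every null combination:

```java
import java.util.Objects;

// The hand-rolled null-safe ternary and Objects.equals are equivalent.
final class NullSafeEquals {
    static boolean ternary(Object a, Object b) {
        return a != null ? a.equals(b) : b == null;
    }

    public static void main(String[] args) {
        System.out.println(ternary(null, null) == Objects.equals(null, null)); // true
        System.out.println(ternary("x", null) == Objects.equals("x", null));   // true
        System.out.println(ternary(null, "x") == Objects.equals(null, "x"));   // true
        System.out.println(ternary("x", "x") == Objects.equals("x", "x"));     // true
    }
}
```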
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsSpec.java b/server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsSpec.java
index 45054a90c749f..48aea98887ff0 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsSpec.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsSpec.java
@@ -27,12 +27,12 @@ public boolean noRequirements() {
/**
* Use when no stored fields are required
*/
- public static StoredFieldsSpec NO_REQUIREMENTS = new StoredFieldsSpec(false, false, Set.of());
+ public static final StoredFieldsSpec NO_REQUIREMENTS = new StoredFieldsSpec(false, false, Set.of());
/**
* Use when the source should be loaded but no other stored fields are required
*/
- public static StoredFieldsSpec NEEDS_SOURCE = new StoredFieldsSpec(true, false, Set.of());
+ public static final StoredFieldsSpec NEEDS_SOURCE = new StoredFieldsSpec(true, false, Set.of());
/**
* Combine these stored field requirements with those from another StoredFieldsSpec
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java
index bba614dce78a5..4587d7560b2d9 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java
@@ -26,8 +26,6 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import java.util.Map;
-import java.util.function.Function;
/**
* Context used to fetch the {@code _source}.
@@ -42,7 +40,6 @@ public class FetchSourceContext implements Writeable, ToXContentObject {
private final boolean fetchSource;
private final String[] includes;
private final String[] excludes;
- private Function<Map<String, ?>, Map<String, Object>> filter;
public static FetchSourceContext of(boolean fetchSource) {
return fetchSource ? FETCH_SOURCE : DO_NOT_FETCH_SOURCE;
@@ -153,33 +150,9 @@ public static FetchSourceContext fromXContent(XContentParser parser) throws IOEx
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (INCLUDES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
- List<String> includesList = new ArrayList<>();
- while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- if (token == XContentParser.Token.VALUE_STRING) {
- includesList.add(parser.text());
- } else {
- throw new ParsingException(
- parser.getTokenLocation(),
- "Unknown key for a " + token + " in [" + currentFieldName + "].",
- parser.getTokenLocation()
- );
- }
- }
- includes = includesList.toArray(Strings.EMPTY_ARRAY);
+ includes = parseStringArray(parser, currentFieldName);
} else if (EXCLUDES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
- List<String> excludesList = new ArrayList<>();
- while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- if (token == XContentParser.Token.VALUE_STRING) {
- excludesList.add(parser.text());
- } else {
- throw new ParsingException(
- parser.getTokenLocation(),
- "Unknown key for a " + token + " in [" + currentFieldName + "].",
- parser.getTokenLocation()
- );
- }
- }
- excludes = excludesList.toArray(Strings.EMPTY_ARRAY);
+ excludes = parseStringArray(parser, currentFieldName);
} else {
throw new ParsingException(
parser.getTokenLocation(),
@@ -227,6 +200,25 @@ public static FetchSourceContext fromXContent(XContentParser parser) throws IOEx
return FetchSourceContext.of(fetchSource, includes, excludes);
}
+ private static String[] parseStringArray(XContentParser parser, String currentFieldName) throws IOException {
+ XContentParser.Token token;
+ List<String> values = new ArrayList<>();
+ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ if (token == XContentParser.Token.VALUE_STRING) {
+ values.add(parser.text());
+ } else {
+ throw new ParsingException(
+ parser.getTokenLocation(),
+ "Unknown key for a " + token + " in [" + currentFieldName + "].",
+ parser.getTokenLocation()
+ );
+ }
+ }
+ return values.toArray(Strings.EMPTY_ARRAY);
+ }
+
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (fetchSource) {
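The twin includes/excludes loops collapse into one `parseStringArray` helper. A hedged sketch of the same extraction against a made-up token cursor (the real code drives `XContentParser` tokens):

```java
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

// Hedged sketch of the duplicated-loop extraction. TokenCursor is a made-up
// stand-in for XContentParser's token stream; null models a non-string token.
final class ArrayParsing {
    interface TokenCursor extends Iterator<String> {}

    static String[] parseStringArray(TokenCursor cursor, String fieldName) {
        List<String> values = new ArrayList<>();
        while (cursor.hasNext()) { // real code: parser.nextToken() != END_ARRAY
            String token = cursor.next();
            if (token == null) { // real code: token != VALUE_STRING
                throw new IllegalArgumentException("Unknown key for a token in [" + fieldName + "].");
            }
            values.add(token);
        }
        return values.toArray(new String[0]);
    }
}
```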
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java
index 3207f1ffa99f0..36cda88a063ec 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java
@@ -451,13 +451,6 @@ public HB boundaryScannerLocale(String boundaryScannerLocale) {
return (HB) this;
}
- /**
- * @return the value set by {@link #boundaryScannerLocale(String)}
- */
- public Locale boundaryScannerLocale() {
- return this.boundaryScannerLocale;
- }
-
/**
* Allows to set custom options for custom highlighters.
*/
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java
index 31e704fe30ff9..cae353bb91014 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java
@@ -8,7 +8,6 @@
package org.elasticsearch.search.fetch.subphase.highlight;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.WeightedSpanTerm;
@@ -22,18 +21,6 @@
public final class CustomQueryScorer extends QueryScorer {
- public CustomQueryScorer(Query query, IndexReader reader, String field, String defaultField) {
- super(query, reader, field, defaultField);
- }
-
- public CustomQueryScorer(Query query, IndexReader reader, String field) {
- super(query, reader, field);
- }
-
- public CustomQueryScorer(Query query, String field, String defaultField) {
- super(query, field, defaultField);
- }
-
public CustomQueryScorer(Query query, String field) {
super(query, field);
}
@@ -42,10 +29,6 @@ public CustomQueryScorer(Query query) {
super(query);
}
- public CustomQueryScorer(WeightedSpanTerm[] weightedTerms) {
- super(weightedTerms);
- }
-
@Override
protected WeightedSpanTermExtractor newTermExtractor(String defaultField) {
return defaultField == null ? new CustomWeightedSpanTermExtractor() : new CustomWeightedSpanTermExtractor(defaultField);
@@ -69,7 +52,6 @@ protected void extractUnknownQuery(Query query, Map<String, WeightedSpanTerm> te
protected void extract(Query query, float boost, Map<String, WeightedSpanTerm> terms) throws IOException {
if (isChildOrParentQuery(query.getClass())) {
// skip has_child or has_parent queries, see: https://github.com/elastic/elasticsearch/issues/14999
- return;
} else if (query instanceof FunctionScoreQuery) {
super.extract(((FunctionScoreQuery) query).getSubQuery(), boost, terms);
} else if (query instanceof ESToParentBlockJoinQuery) {
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java
index d90aba24a94df..e77436ba61423 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java
@@ -32,7 +32,6 @@
import org.elasticsearch.lucene.search.uhighlight.Snippet;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import java.io.IOException;
import java.text.BreakIterator;
@@ -120,7 +119,7 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) {
fieldContext.context.getSearchExecutionContext().getIndexAnalyzer(f -> Lucene.KEYWORD_ANALYZER),
queryMaxAnalyzedOffset
);
- PassageFormatter passageFormatter = getPassageFormatter(fieldContext.hitContext, fieldContext.field, encoder);
+ PassageFormatter passageFormatter = getPassageFormatter(fieldContext.field, encoder);
IndexSearcher searcher = fieldContext.context.searcher();
OffsetSource offsetSource = getOffsetSource(fieldContext.context, fieldContext.fieldType);
BreakIterator breakIterator;
@@ -161,7 +160,7 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) {
);
}
- protected PassageFormatter getPassageFormatter(HitContext hitContext, SearchHighlightContext.Field field, Encoder encoder) {
+ protected PassageFormatter getPassageFormatter(SearchHighlightContext.Field field, Encoder encoder) {
return new CustomPassageFormatter(field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0], encoder);
}
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java
index 75a1777ae7d8f..8417c9d747981 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java
@@ -312,6 +312,6 @@ private static class FieldHighlightEntry {
private static class HighlighterEntry {
public org.apache.lucene.search.vectorhighlight.FastVectorHighlighter fvh;
- public Map fields = new HashMap<>();
+ public final Map fields = new HashMap<>();
}
}
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java
index 9c761936863d6..5421cd59a23e4 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java
@@ -8,15 +8,11 @@
package org.elasticsearch.search.fetch.subphase.highlight;
-import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo;
import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo.SubInfo;
import org.apache.lucene.search.vectorhighlight.FragmentsBuilder;
import org.apache.lucene.util.CollectionUtil;
-import org.elasticsearch.index.analysis.AnalyzerComponentsProvider;
-import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.analysis.TokenFilterFactory;
import java.util.List;
@@ -45,7 +41,7 @@ public static WeightedFragInfo fixWeightedFragInfo(WeightedFragInfo fragInfo) {
CollectionUtil.introSort(subInfos, (o1, o2) -> {
int startOffset = o1.getTermsOffsets().get(0).getStartOffset();
int startOffset2 = o2.getTermsOffsets().get(0).getStartOffset();
- return compare(startOffset, startOffset2);
+ return Integer.compare(startOffset, startOffset2);
});
return new WeightedFragInfo(
Math.min(fragInfo.getSubInfos().get(0).getTermsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()),
@@ -58,23 +54,4 @@ public static WeightedFragInfo fixWeightedFragInfo(WeightedFragInfo fragInfo) {
}
}
- private static int compare(int x, int y) {
- return (x < y) ? -1 : ((x == y) ? 0 : 1);
- }
-
- private static boolean containsBrokenAnalysis(Analyzer analyzer) {
- // TODO maybe we need a getter on Namedanalyzer that tells if this uses broken Analysis
- if (analyzer instanceof NamedAnalyzer) {
- analyzer = ((NamedAnalyzer) analyzer).analyzer();
- }
- if (analyzer instanceof AnalyzerComponentsProvider) {
- final TokenFilterFactory[] tokenFilters = ((AnalyzerComponentsProvider) analyzer).getComponents().getTokenFilters();
- for (TokenFilterFactory tokenFilterFactory : tokenFilters) {
- if (tokenFilterFactory.breaksFastVectorHighlighter()) {
- return true;
- }
- }
- }
- return false;
- }
}
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java
index 7d371ac372774..0042b1eafba71 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java
@@ -45,8 +45,6 @@
* @see org.elasticsearch.search.builder.SearchSourceBuilder#highlight()
*/
public final class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilder> {
- /** default for whether to highlight fields based on the source even if stored separately */
- public static final boolean DEFAULT_FORCE_SOURCE = false;
/** default for whether a field should be highlighted only if a query matches that field */
public static final boolean DEFAULT_REQUIRE_FIELD_MATCH = true;
/** default for whether to stop highlighting at the defined max_analyzed_offset to avoid exceptions for longer texts */
@@ -149,17 +147,6 @@ public HighlightBuilder field(String name) {
return field(new Field(name));
}
- /**
- * Adds a field to be highlighted with a provided fragment size (in characters), and
- * default number of fragments of 5.
- *
- * @param name The field to highlight
- * @param fragmentSize The size of a fragment in characters
- */
- public HighlightBuilder field(String name, int fragmentSize) {
- return field(new Field(name).fragmentSize(fragmentSize));
- }
-
/**
* Adds a field to be highlighted with a provided fragment size (in characters), and
* a provided (maximum) number of fragments.
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java
index d4b5234f4e0b2..6bc9f65ac655f 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java
@@ -30,23 +30,12 @@
*/
public class HighlightField implements ToXContentFragment, Writeable {
- private String name;
+ private final String name;
- private Text[] fragments;
+ private final Text[] fragments;
public HighlightField(StreamInput in) throws IOException {
- name = in.readString();
- if (in.readBoolean()) {
- int size = in.readVInt();
- if (size == 0) {
- fragments = Text.EMPTY_ARRAY;
- } else {
- fragments = new Text[size];
- for (int i = 0; i < size; i++) {
- fragments[i] = in.readText();
- }
- }
- }
+ this(in.readString(), in.readOptionalArray(StreamInput::readText, Text[]::new));
}
public HighlightField(String name, Text[] fragments) {
@@ -61,13 +50,6 @@ public String name() {
return name;
}
- /**
- * The name of the field highlighted.
- */
- public String getName() {
- return name();
- }
-
/**
* The highlighted fragments. {@code null} if failed to highlight (for example, the field is not stored).
*/
@@ -75,13 +57,6 @@ public Text[] fragments() {
return fragments;
}
- /**
- * The highlighted fragments. {@code null} if failed to highlight (for example, the field is not stored).
- */
- public Text[] getFragments() {
- return fragments();
- }
-
@Override
public String toString() {
return "[" + name + "], fragments[" + Arrays.toString(fragments) + "]";
@@ -101,14 +76,14 @@ public void writeTo(StreamOutput out) throws IOException {
public static HighlightField fromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser);
String fieldName = parser.currentName();
- Text[] fragments = null;
+ Text[] fragments;
XContentParser.Token token = parser.nextToken();
if (token == XContentParser.Token.START_ARRAY) {
List<Text> values = new ArrayList<>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
values.add(new Text(parser.text()));
}
- fragments = values.toArray(new Text[values.size()]);
+ fragments = values.toArray(Text.EMPTY_ARRAY);
} else if (token == XContentParser.Token.VALUE_NULL) {
fragments = null;
} else {
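The constructor now delegates to `readOptionalArray`, whose wire format (presence flag, element count, elements) is exactly what the removed hand-rolled loop decoded. A sketch of that framing with plain data streams standing in for `StreamInput`/`StreamOutput` (ES encodes the count as a vInt rather than a fixed-width int):

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Sketch of the optional-array wire framing: presence flag, count, elements.
final class OptionalArrayCodec {
    static void write(DataOutputStream out, String[] values) throws IOException {
        out.writeBoolean(values != null); // presence flag
        if (values != null) {
            out.writeInt(values.length);  // element count
            for (String v : values) {
                out.writeUTF(v);
            }
        }
    }

    static String[] read(DataInputStream in) throws IOException {
        if (in.readBoolean() == false) {
            return null; // absent array
        }
        String[] values = new String[in.readInt()];
        for (int i = 0; i < values.length; i++) {
            values[i] = in.readUTF();
        }
        return values;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        write(new DataOutputStream(buf), new String[] { "a", "b" });
        String[] back = read(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(back.length); // 2
    }
}
```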
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java
index f1bb3f2c773ac..79c7198564be5 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java
@@ -8,7 +8,6 @@
package org.elasticsearch.search.fetch.subphase.highlight;
import org.apache.lucene.document.Field;
-import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
@@ -20,8 +19,6 @@
import org.elasticsearch.search.lookup.Source;
import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder {
@@ -51,19 +48,7 @@ public SourceScoreOrderFragmentsBuilder(
@Override
protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException {
// we know its low level reader, and matching docId, since that's how we call the highlighter with
- List<Object> values = valueFetcher.fetchValues(source, docId, new ArrayList<>());
- if (values.size() > 1 && fetchContext.sourceLoader().reordersFieldValues()) {
- throw new IllegalArgumentException(
- "The fast vector highlighter doesn't support loading multi-valued fields from _source in index ["
- + fetchContext.getIndexName()
- + "] because _source can reorder field values"
- );
- }
- Field[] fields = new Field[values.size()];
- for (int i = 0; i < values.size(); i++) {
- fields[i] = new Field(fieldType.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
- }
- return fields;
+ return SourceSimpleFragmentsBuilder.doGetFields(docId, valueFetcher, source, fetchContext, fieldType);
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java
index 0a7a5d300339b..c6b69717b8f75 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java
@@ -46,6 +46,11 @@ public SourceSimpleFragmentsBuilder(
@Override
protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException {
// we know its low level reader, and matching docId, since that's how we call the highlighter with
+ return doGetFields(docId, valueFetcher, source, fetchContext, fieldType);
+ }
+
+ static Field[] doGetFields(int docId, ValueFetcher valueFetcher, Source source, FetchContext fetchContext, MappedFieldType fieldType)
+ throws IOException {
List<Object> values = valueFetcher.fetchValues(source, docId, new ArrayList<>());
if (values.isEmpty()) {
return EMPTY_FIELDS;
@@ -63,5 +68,4 @@ protected Field[] getFields(IndexReader reader, int docId, String fieldName) thr
}
return fields;
}
-
}
diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
index 3c69db98c7588..b7c77e4968854 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
@@ -265,7 +265,7 @@ private static LeafSlice[] computeSlices(List<LeafReaderContext> leaves, int min
List<LeafReaderContext> sortedLeaves = new ArrayList<>(leaves);
// Sort by maxDoc, descending:
final Comparator<LeafReaderContext> leafComparator = Comparator.comparingInt(l -> l.reader().maxDoc());
- Collections.sort(sortedLeaves, leafComparator.reversed());
+ sortedLeaves.sort(leafComparator.reversed());
// we add the groups on a priority queue, so we can add orphan leafs to the smallest group
final Comparator<List<LeafReaderContext>> groupComparator = Comparator.comparingInt(
l -> l.stream().mapToInt(lr -> lr.reader().maxDoc()).sum()
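`computeSlices` sorts leaves by `maxDoc` descending and always assigns the next leaf to the currently smallest group, a greedy longest-processing-time partition. A self-contained sketch over plain ints:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;

// Greedy longest-processing-time sketch: sort sizes descending, then always
// add the next item to the smallest group, as computeSlices does with leaves.
final class SliceGrouping {
    static List<List<Integer>> group(int[] sizes, int groups) {
        int[] sorted = sizes.clone();
        Arrays.sort(sorted); // ascending; iterate from the back for descending
        PriorityQueue<List<Integer>> queue = new PriorityQueue<>(
            Comparator.comparingInt(g -> g.stream().mapToInt(Integer::intValue).sum())
        );
        for (int i = 0; i < groups; i++) {
            queue.add(new ArrayList<>());
        }
        for (int i = sorted.length - 1; i >= 0; i--) {
            List<Integer> smallest = queue.poll();
            smallest.add(sorted[i]);
            queue.add(smallest); // re-queue so group sums stay ordered
        }
        return new ArrayList<>(queue);
    }

    public static void main(String[] args) {
        // Two groups with near-equal sums, e.g. [9, 3] (12) and [7, 5, 1] (13).
        System.out.println(group(new int[] { 9, 7, 5, 3, 1 }, 2));
    }
}
```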
diff --git a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java
index 794e429bbc473..ecb7833558a6b 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java
@@ -142,28 +142,7 @@ public void searchNearestVectors(String field, byte[] target, KnnCollector colle
in.searchNearestVectors(field, target, collector, acceptDocs);
return;
}
- // when acceptDocs is null due to no doc deleted, we will instantiate a new one that would
- // match all docs to allow timeout checking.
- final Bits updatedAcceptDocs = acceptDocs == null ? new Bits.MatchAllBits(maxDoc()) : acceptDocs;
- Bits timeoutCheckingAcceptDocs = new Bits() {
- private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = 10;
- private int calls;
-
- @Override
- public boolean get(int index) {
- if (calls++ % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) {
- queryCancellation.checkCancelled();
- }
-
- return updatedAcceptDocs.get(index);
- }
-
- @Override
- public int length() {
- return updatedAcceptDocs.length();
- }
- };
- in.searchNearestVectors(field, target, collector, timeoutCheckingAcceptDocs);
+ in.searchNearestVectors(field, target, collector, new TimeOutCheckingBits(acceptDocs));
}
@Override
@@ -181,29 +160,32 @@ public void searchNearestVectors(String field, float[] target, KnnCollector coll
in.searchNearestVectors(field, target, collector, acceptDocs);
return;
}
- // when acceptDocs is null due to no doc deleted, we will instantiate a new one that would
- // match all docs to allow timeout checking.
- final Bits updatedAcceptDocs = acceptDocs == null ? new Bits.MatchAllBits(maxDoc()) : acceptDocs;
- Bits timeoutCheckingAcceptDocs = new Bits() {
- private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = 10;
- private int calls;
-
- @Override
- public boolean get(int index) {
- if (calls++ % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) {
- queryCancellation.checkCancelled();
- }
-
- return updatedAcceptDocs.get(index);
- }
+ in.searchNearestVectors(field, target, collector, new TimeOutCheckingBits(acceptDocs));
+ }
+
+ private class TimeOutCheckingBits implements Bits {
+ private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = 10;
+ private final Bits updatedAcceptDocs;
+ private int calls;
- @Override
- public int length() {
- return updatedAcceptDocs.length();
+ TimeOutCheckingBits(Bits acceptDocs) {
+ // when acceptDocs is null due to no doc deleted, we will instantiate a new one that would
+ // match all docs to allow timeout checking.
+ this.updatedAcceptDocs = acceptDocs == null ? new Bits.MatchAllBits(maxDoc()) : acceptDocs;
+ }
+
+ @Override
+ public boolean get(int index) {
+ if (calls++ % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) {
+ queryCancellation.checkCancelled();
}
- };
+ return updatedAcceptDocs.get(index);
+ }
- in.searchNearestVectors(field, target, collector, acceptDocs);
+ @Override
+ public int length() {
+ return updatedAcceptDocs.length();
+ }
}
}
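Naming the wrapper once removes the duplication that let the second call site pass the unwrapped `acceptDocs` (note the removed `in.searchNearestVectors(field, target, collector, acceptDocs)` line). A standalone sketch of the throttled cancellation check, with a plain stand-in for Lucene's `Bits`:

```java
// Standalone sketch of the throttled cancellation check: a named wrapper
// replaces the two divergent anonymous classes. Bits is a plain stand-in
// for Lucene's org.apache.lucene.util.Bits.
final class TimeoutBitsSketch {
    interface Bits {
        boolean get(int index);

        int length();
    }

    static Bits timeoutChecking(Bits delegate, Runnable checkCancelled) {
        return new Bits() {
            private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = 10;
            private int calls;

            @Override
            public boolean get(int index) {
                if (calls++ % MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK == 0) {
                    checkCancelled.run(); // throws if the query timed out
                }
                return delegate.get(index);
            }

            @Override
            public int length() {
                return delegate.length();
            }
        };
    }
}
```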
diff --git a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java
index 5dc0374b73fc6..07fa169642dbf 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java
@@ -251,8 +251,7 @@ public void searchNearestVectors(String field, float[] target, KnnCollector coll
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("FieldUsageTrackingLeafReader(reader=");
- return sb.append(in).append(')').toString();
+ return "FieldUsageTrackingLeafReader(reader=" + in + ')';
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
index 67a265127026d..8bd91c9b9cfe7 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
@@ -174,11 +174,6 @@ public boolean sourceRequested() {
return in.sourceRequested();
}
- @Override
- public boolean hasFetchSourceContext() {
- return in.hasFetchSourceContext();
- }
-
@Override
public FetchSourceContext fetchSourceContext() {
return in.fetchSourceContext();
diff --git a/server/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java
index b8886c3e79a8c..911b647067e63 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java
@@ -54,11 +54,6 @@ public Scroll scroll() {
return scroll;
}
- public InternalScrollSearchRequest scroll(Scroll scroll) {
- this.scroll = scroll;
- return this;
- }
-
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
return new SearchShardTask(id, type, action, getDescription(), parentTaskId, headers);
diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java
index 274dc233ff5c7..ef67d3d19e42f 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java
@@ -182,8 +182,6 @@ public final void assignRescoreDocIds(RescoreDocIds rescoreDocIds) {
*/
public abstract boolean sourceRequested();
- public abstract boolean hasFetchSourceContext();
-
public abstract FetchSourceContext fetchSourceContext();
public abstract SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext);
diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
index fbfcfdf9500ed..fe9cfdc87695e 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
@@ -240,7 +240,7 @@ public ShardSearchRequest(
this.originalIndices = originalIndices;
this.readerId = readerId;
this.keepAlive = keepAlive;
- assert keepAlive == null || readerId != null : "readerId: " + readerId + " keepAlive: " + keepAlive;
+ assert keepAlive == null || readerId != null : "readerId: null keepAlive: " + keepAlive;
this.channelVersion = TransportVersion.current();
this.waitForCheckpoint = waitForCheckpoint;
this.waitForCheckpointsTimeout = waitForCheckpointsTimeout;
@@ -334,7 +334,7 @@ public ShardSearchRequest(StreamInput in) throws IOException {
readerId = null;
keepAlive = null;
}
- assert keepAlive == null || readerId != null : "readerId: " + readerId + " keepAlive: " + keepAlive;
+ assert keepAlive == null || readerId != null : "readerId: null keepAlive: " + keepAlive;
channelVersion = TransportVersion.min(TransportVersion.readVersion(in), in.getTransportVersion());
if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) {
waitForCheckpoint = in.readLong();
diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java
index f1fd984aec5ba..8b4824e42cbf4 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java
@@ -107,11 +107,6 @@ public boolean sourceRequested() {
return fetchSourceContext != null && fetchSourceContext.fetchSource();
}
- @Override
- public boolean hasFetchSourceContext() {
- return fetchSourceContext != null;
- }
-
@Override
public FetchSourceContext fetchSourceContext() {
return fetchSourceContext;
diff --git a/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java
index bd6971dceb7be..988ea24d0fcc2 100644
--- a/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java
+++ b/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java
@@ -102,30 +102,27 @@ private FieldFactoryWrapper getFactoryForField(String fieldName) {
// Load the field data on behalf of the script. Otherwise, it would require
// additional permissions to deal with pagedbytes/ramusagestimator/etc.
- return AccessController.doPrivileged(new PrivilegedAction<FieldFactoryWrapper>() {
- @Override
- public FieldFactoryWrapper run() {
- FieldFactoryWrapper fieldFactory = null;
- IndexFieldData<?> indexFieldData = fieldDataLookup.apply(fieldType, SCRIPT);
+ return AccessController.doPrivileged((PrivilegedAction<FieldFactoryWrapper>) () -> {
+ IndexFieldData<?> indexFieldData = fieldDataLookup.apply(fieldType, SCRIPT);
- FieldFactoryWrapper docFactory = null;
+ FieldFactoryWrapper docFactory = null;
- if (docFactoryCache.isEmpty() == false) {
- docFactory = docFactoryCache.get(fieldName);
- }
+ if (docFactoryCache.isEmpty() == false) {
+ docFactory = docFactoryCache.get(fieldName);
+ }
- // if this field has already been accessed via the doc-access API and the field-access API
- // uses doc values then we share to avoid double-loading
- if (docFactory != null && indexFieldData instanceof SourceValueFetcherIndexFieldData == false) {
- fieldFactory = docFactory;
- } else {
- fieldFactory = new FieldFactoryWrapper(indexFieldData.load(reader).getScriptFieldFactory(fieldName));
- }
+ // if this field has already been accessed via the doc-access API and the field-access API
+ // uses doc values then we share to avoid double-loading
+ FieldFactoryWrapper fieldFactory;
+ if (docFactory != null && indexFieldData instanceof SourceValueFetcherIndexFieldData == false) {
+ fieldFactory = docFactory;
+ } else {
+ fieldFactory = new FieldFactoryWrapper(indexFieldData.load(reader).getScriptFieldFactory(fieldName));
+ }
- fieldFactoryCache.put(fieldName, fieldFactory);
+ fieldFactoryCache.put(fieldName, fieldFactory);
- return fieldFactory;
- }
+ return fieldFactory;
});
}
@@ -150,35 +147,32 @@ private FieldFactoryWrapper getFactoryForDoc(String fieldName) {
// Load the field data on behalf of the script. Otherwise, it would require
// additional permissions to deal with pagedbytes/ramusagestimator/etc.
- return AccessController.doPrivileged(new PrivilegedAction<FieldFactoryWrapper>() {
- @Override
- public FieldFactoryWrapper run() {
- FieldFactoryWrapper docFactory = null;
- FieldFactoryWrapper fieldFactory = null;
-
- if (fieldFactoryCache.isEmpty() == false) {
- fieldFactory = fieldFactoryCache.get(fieldName);
- }
+ return AccessController.doPrivileged((PrivilegedAction<FieldFactoryWrapper>) () -> {
+ FieldFactoryWrapper docFactory = null;
+ FieldFactoryWrapper fieldFactory = null;
- if (fieldFactory != null) {
- IndexFieldData<?> fieldIndexFieldData = fieldDataLookup.apply(fieldType, SCRIPT);
+ if (fieldFactoryCache.isEmpty() == false) {
+ fieldFactory = fieldFactoryCache.get(fieldName);
+ }
- // if this field has already been accessed via the field-access API and the field-access API
- // uses doc values then we share to avoid double-loading
- if (fieldIndexFieldData instanceof SourceValueFetcherIndexFieldData == false) {
- docFactory = fieldFactory;
- }
- }
+ if (fieldFactory != null) {
+ IndexFieldData<?> fieldIndexFieldData = fieldDataLookup.apply(fieldType, SCRIPT);
- if (docFactory == null) {
- IndexFieldData<?> indexFieldData = fieldDataLookup.apply(fieldType, SEARCH);
- docFactory = new FieldFactoryWrapper(indexFieldData.load(reader).getScriptFieldFactory(fieldName));
+ // if this field has already been accessed via the field-access API and the field-access API
+ // uses doc values then we share to avoid double-loading
+ if (fieldIndexFieldData instanceof SourceValueFetcherIndexFieldData == false) {
+ docFactory = fieldFactory;
}
+ }
- docFactoryCache.put(fieldName, docFactory);
-
- return docFactory;
+ if (docFactory == null) {
+ IndexFieldData<?> indexFieldData = fieldDataLookup.apply(fieldType, SEARCH);
+ docFactory = new FieldFactoryWrapper(indexFieldData.load(reader).getScriptFieldFactory(fieldName));
}
+
+ docFactoryCache.put(fieldName, docFactory);
+
+ return docFactory;
});
}
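
The hunks above rewrite anonymous PrivilegedAction classes into lambdas. The explicit `(PrivilegedAction<FieldFactoryWrapper>)` cast is needed because `AccessController.doPrivileged` is overloaded for both `PrivilegedAction` and `PrivilegedExceptionAction`, so a bare lambda would be ambiguous. A minimal, self-contained sketch of the pattern; the cache and loader here are illustrative stand-ins, not the real LeafDocLookup fields:

    import java.security.AccessController;
    import java.security.PrivilegedAction;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    class PrivilegedLoadSketch {
        private final Map<String, String> cache = new ConcurrentHashMap<>();

        String load(String key) {
            // The cast selects the PrivilegedAction overload; without it the
            // compiler cannot choose between PrivilegedAction and
            // PrivilegedExceptionAction for a lambda that throws nothing.
            return AccessController.doPrivileged(
                (PrivilegedAction<String>) () -> cache.computeIfAbsent(key, k -> "loaded:" + k)
            );
        }
    }
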
diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java
index 4b075523c5286..84f04283d64e8 100644
--- a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java
@@ -320,11 +320,6 @@ public boolean sourceRequested() {
throw new UnsupportedOperationException();
}
- @Override
- public boolean hasFetchSourceContext() {
- throw new UnsupportedOperationException();
- }
-
@Override
public FetchSourceContext fetchSourceContext() {
throw new UnsupportedOperationException();
diff --git a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java
index 389e3a56cf152..c873717fe55e7 100644
--- a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java
+++ b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java
@@ -197,9 +197,6 @@ public void setScoreMode(QueryRescoreMode scoreMode) {
this.scoreMode = scoreMode;
}
- public void setScoreMode(String scoreMode) {
- setScoreMode(QueryRescoreMode.fromString(scoreMode));
- }
}
}
diff --git a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java
index a7977c18d338c..de081fd386d54 100644
--- a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java
+++ b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java
@@ -114,7 +114,6 @@ private class DistanceScorer extends Scorer {
private final TwoPhaseIterator twoPhase;
private final DocIdSetIterator disi;
private final float weight;
- private double maxDistance = GeoUtils.EARTH_MEAN_RADIUS_METERS * Math.PI;
protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) {
super(weight);
diff --git a/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java b/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java
index ae2f7fc4ecbbb..b1b30856324b4 100644
--- a/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java
+++ b/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java
@@ -25,7 +25,6 @@
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
-import java.util.Locale;
import static java.util.Collections.emptyList;
@@ -110,7 +109,7 @@ interface Loader {
public void swap(long lhs, long rhs) {}
@Override
- public Loader loader(LeafReaderContext ctx) throws IOException {
+ public Loader loader(LeafReaderContext ctx) {
return (index, doc) -> {};
}
};
@@ -254,24 +253,6 @@ public boolean inHeapMode(long bucket) {
*/
protected abstract void swap(long lhs, long rhs);
- /**
- * Return a fairly human readable representation of the array backing the sort.
- *
- * This is intentionally not a {@link #toString()} implementation because it'll
- * be quite slow.
- *
- */
- protected final String debugFormat() {
- StringBuilder b = new StringBuilder();
- for (long index = 0; index < values().size(); index++) {
- if (index % bucketSize == 0) {
- b.append('\n').append(String.format(Locale.ROOT, "%20d", index / bucketSize)).append(": ");
- }
- b.append(String.format(Locale.ROOT, "%20s", getValue(index))).append(' ');
- }
- return b.toString();
- }
-
/**
* Initialize the gather offsets after setting up values. Subclasses
* should call this once, after setting up their {@link #values()}.
@@ -415,7 +396,6 @@ public final void collect(int doc, long bucket) throws IOException {
} else {
setNextGatherOffset(rootIndex, next - 1);
}
- return;
}
/**
diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java
index 5d11563b5d8ed..0c9b56b1855d7 100644
--- a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java
@@ -100,8 +100,12 @@ public SortFieldAndFormat build(SearchExecutionContext context) {
}
@Override
- public BucketedSort buildBucketedSort(SearchExecutionContext context, BigArrays bigArrays, int bucketSize, BucketedSort.ExtraData extra)
- throws IOException {
+ public BucketedSort buildBucketedSort(
+ SearchExecutionContext context,
+ BigArrays bigArrays,
+ int bucketSize,
+ BucketedSort.ExtraData extra
+ ) {
return new BucketedSort.ForFloats(bigArrays, order, DocValueFormat.RAW, bucketSize, extra) {
@Override
public boolean needsScores() {
@@ -109,7 +113,7 @@ public boolean needsScores() {
}
@Override
- public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
+ public Leaf forLeaf(LeafReaderContext ctx) {
return new BucketedSort.ForFloats.Leaf(ctx) {
private Scorable scorer;
private float score;
@@ -165,7 +169,7 @@ public TransportVersion getMinimalSupportedVersion() {
}
@Override
- public ScoreSortBuilder rewrite(QueryRewriteContext ctx) throws IOException {
+ public ScoreSortBuilder rewrite(QueryRewriteContext ctx) {
return this;
}
diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java
index 4ac7348a6c4a4..a0745d0f9c64a 100644
--- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java
@@ -295,7 +295,7 @@ protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOEx
final BytesRefBuilder spare = new BytesRefBuilder();
@Override
- public boolean advanceExact(int doc) throws IOException {
+ public boolean advanceExact(int doc) {
leafScript.setDocument(doc);
return true;
}
@@ -343,7 +343,7 @@ protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws
leafScript = numberSortScript.newInstance(new DocValuesDocReader(searchLookup, context));
final NumericDoubleValues values = new NumericDoubleValues() {
@Override
- public boolean advanceExact(int doc) throws IOException {
+ public boolean advanceExact(int doc) {
leafScript.setDocument(doc);
return true;
}
@@ -374,7 +374,7 @@ protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOEx
final BinaryDocValues values = new AbstractBinaryDocValues() {
@Override
- public boolean advanceExact(int doc) throws IOException {
+ public boolean advanceExact(int doc) {
leafScript.setDocument(doc);
return true;
}
diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilders.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilders.java
index 34363a614a7e4..c0bcbdc98e35f 100644
--- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilders.java
+++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilders.java
@@ -8,7 +8,6 @@
package org.elasticsearch.search.sort;
-import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.builder.PointInTimeBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
@@ -65,16 +64,6 @@ public static GeoDistanceSortBuilder geoDistanceSort(String fieldName, double la
return new GeoDistanceSortBuilder(fieldName, lat, lon);
}
- /**
- * Constructs a new distance based sort on a geo point like field.
- *
- * @param fieldName The geo point like field name.
- * @param points The points to create the range distance facets from.
- */
- public static GeoDistanceSortBuilder geoDistanceSort(String fieldName, GeoPoint... points) {
- return new GeoDistanceSortBuilder(fieldName, points);
- }
-
/**
* Constructs a new distance based sort on a geo point like field.
*
diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortValue.java b/server/src/main/java/org/elasticsearch/search/sort/SortValue.java
index 067439931a85b..ab7dcd6615f79 100644
--- a/server/src/main/java/org/elasticsearch/search/sort/SortValue.java
+++ b/server/src/main/java/org/elasticsearch/search/sort/SortValue.java
@@ -369,7 +369,6 @@ private static class EmptySortValue extends SortValue {
public static final String NAME = "empty";
private static final String EMPTY_STRING = "";
- private int sortValue = 0;
private EmptySortValue() {}
@@ -381,7 +380,7 @@ public String getWriteableName() {
}
@Override
- public void writeTo(StreamOutput out) throws IOException {}
+ public void writeTo(StreamOutput out) {}
@Override
public Object getKey() {
@@ -394,7 +393,7 @@ public String format(DocValueFormat format) {
}
@Override
- protected XContentBuilder rawToXContent(XContentBuilder builder) throws IOException {
+ protected XContentBuilder rawToXContent(XContentBuilder builder) {
return builder;
}
@@ -420,7 +419,7 @@ public String toString() {
@Override
public int typeComparisonKey() {
- return sortValue;
+ return 0;
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java b/server/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java
index 80beb5d2ec7ca..0956a9f94677c 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java
@@ -20,17 +20,17 @@
public class DirectSpellcheckerSettings {
// NB: If this changes, make sure to change the default in TermBuilderSuggester
- public static SuggestMode DEFAULT_SUGGEST_MODE = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
- public static float DEFAULT_ACCURACY = 0.5f;
- public static SortBy DEFAULT_SORT = SortBy.SCORE;
+ public static final SuggestMode DEFAULT_SUGGEST_MODE = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
+ public static final float DEFAULT_ACCURACY = 0.5f;
+ public static final SortBy DEFAULT_SORT = SortBy.SCORE;
// NB: If this changes, make sure to change the default in TermBuilderSuggester
- public static StringDistance DEFAULT_STRING_DISTANCE = DirectSpellChecker.INTERNAL_LEVENSHTEIN;
- public static int DEFAULT_MAX_EDITS = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE;
- public static int DEFAULT_MAX_INSPECTIONS = 5;
- public static float DEFAULT_MAX_TERM_FREQ = 0.01f;
- public static int DEFAULT_PREFIX_LENGTH = 1;
- public static int DEFAULT_MIN_WORD_LENGTH = 4;
- public static float DEFAULT_MIN_DOC_FREQ = 0f;
+ public static final StringDistance DEFAULT_STRING_DISTANCE = DirectSpellChecker.INTERNAL_LEVENSHTEIN;
+ public static final int DEFAULT_MAX_EDITS = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE;
+ public static final int DEFAULT_MAX_INSPECTIONS = 5;
+ public static final float DEFAULT_MAX_TERM_FREQ = 0.01f;
+ public static final int DEFAULT_PREFIX_LENGTH = 1;
+ public static final int DEFAULT_MIN_WORD_LENGTH = 4;
+ public static final float DEFAULT_MIN_DOC_FREQ = 0f;
private SuggestMode suggestMode = DEFAULT_SUGGEST_MODE;
private float accuracy = DEFAULT_ACCURACY;
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java
index f126091c785d8..f3371caf4c1a7 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java
@@ -206,7 +206,6 @@ public int hashCode() {
@SuppressWarnings("rawtypes")
public abstract static class Suggestion<T extends Suggestion.Entry> implements Iterable<T>, NamedWriteable, ToXContentFragment {
- public static final int TYPE = 0;
protected final String name;
protected final int size;
protected final List<T> entries = new ArrayList<>(5);
@@ -635,10 +634,6 @@ public boolean collateMatch() {
return (collateMatch != null) ? collateMatch : true;
}
- protected void setScore(float score) {
- this.score = score;
- }
-
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeText(text);
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java
index 674f936890283..37cc7bb59c253 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionSearchContext.java
@@ -35,8 +35,8 @@ public abstract static class SuggestionContext {
private Analyzer analyzer;
private int size = 5;
private int shardSize = -1;
- private SearchExecutionContext searchExecutionContext;
- private Suggester<?> suggester;
+ private final SearchExecutionContext searchExecutionContext;
+ private final Suggester<?> suggester;
protected SuggestionContext(Suggester<?> suggester, SearchExecutionContext searchExecutionContext) {
this.suggester = suggester;
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java
index c71673962ca2d..e088948b18e03 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java
@@ -56,7 +56,7 @@
*/
public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSuggestion.Entry> {
- private boolean skipDuplicates;
+ private final boolean skipDuplicates;
/**
* Creates a completion suggestion given its name, size and whether it should skip duplicates
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java
index 86e18b3e5a406..7a3bc3c67ba6d 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java
@@ -211,13 +211,6 @@ private CompletionSuggestionBuilder contexts(XContentBuilder contextBuilder) {
return this;
}
- /**
- * Returns whether duplicate suggestions should be filtered out.
- */
- public boolean skipDuplicates() {
- return skipDuplicates;
- }
-
/**
* Should duplicates be filtered or not. Defaults to {@code false}.
*/
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java
index f241b6f89633e..7d7d5516c50ae 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java
@@ -62,12 +62,12 @@ public static Builder builder() {
return new Builder();
}
- private int editDistance;
- private boolean transpositions;
- private int fuzzyMinLength;
- private int fuzzyPrefixLength;
- private boolean unicodeAware;
- private int maxDeterminizedStates;
+ private final int editDistance;
+ private final boolean transpositions;
+ private final int fuzzyMinLength;
+ private final int fuzzyPrefixLength;
+ private final boolean unicodeAware;
+ private final int maxDeterminizedStates;
private FuzzyOptions(
int editDistance,
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java
index 0759b413dd664..fdfa1303b2d77 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/RegexOptions.java
@@ -61,8 +61,8 @@ static RegexOptions parse(XContentParser parser) throws IOException {
return PARSER.parse(parser, null).build();
}
- private int flagsValue;
- private int maxDeterminizedStates;
+ private final int flagsValue;
+ private final int maxDeterminizedStates;
private RegexOptions(int flagsValue, int maxDeterminizedStates) {
this.flagsValue = flagsValue;
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/TopSuggestGroupDocsCollector.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/TopSuggestGroupDocsCollector.java
index bac3b7491a661..31959df6b023e 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/TopSuggestGroupDocsCollector.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/TopSuggestGroupDocsCollector.java
@@ -25,7 +25,7 @@
* the best one per document (sorted by weight) is kept.
**/
class TopSuggestGroupDocsCollector extends TopSuggestDocsCollector {
- private Map<Integer, List<CharSequence>> docContexts = new HashMap<>();
+ private final Map<Integer, List<CharSequence>> docContexts = new HashMap<>();
/**
* Sole constructor
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java
index ce0c58463bad2..65c464cac256d 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryQueryContext.java
@@ -72,7 +72,7 @@ public boolean equals(Object o) {
if (isPrefix != that.isPrefix) return false;
if (boost != that.boost) return false;
- return category != null ? category.equals(that.category) : that.category == null;
+ return Objects.equals(category, that.category);
}
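
The `Objects.equals` call above is the standard null-safe replacement for the hand-rolled ternary it supersedes; both return true when the two values are both null or equal, and false otherwise. A small demonstration:

    import java.util.Objects;

    class NullSafeEqualsDemo {
        public static void main(String[] args) {
            // Same truth table as: a != null ? a.equals(b) : b == null
            System.out.println(Objects.equals(null, null));   // true
            System.out.println(Objects.equals("cat", null));  // false
            System.out.println(Objects.equals(null, "cat"));  // false
            System.out.println(Objects.equals("cat", "cat")); // true
        }
    }
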
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java
index b180e6fd13335..2a83bf289bdef 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java
@@ -13,7 +13,7 @@
*/
public abstract class ContextBuilder<E extends ContextMapping<?>> {
- protected String name;
+ protected final String name;
/**
* @param name of the context mapper to build
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java
index c48a1ccb12e6f..d2edd460b926d 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java
@@ -165,41 +165,5 @@ public String toString() {
}
}
- public static class InternalQueryContext {
- public final String context;
- public final int boost;
- public final boolean isPrefix;
-
- public InternalQueryContext(String context, int boost, boolean isPrefix) {
- this.context = context;
- this.boost = boost;
- this.isPrefix = isPrefix;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
-
- InternalQueryContext that = (InternalQueryContext) o;
-
- if (boost != that.boost) return false;
- if (isPrefix != that.isPrefix) return false;
- return context != null ? context.equals(that.context) : that.context == null;
-
- }
-
- @Override
- public int hashCode() {
- int result = context != null ? context.hashCode() : 0;
- result = 31 * result + boost;
- result = 31 * result + (isPrefix ? 1 : 0);
- return result;
- }
-
- @Override
- public String toString() {
- return "QueryContext{" + "context='" + context + '\'' + ", boost=" + boost + ", isPrefix=" + isPrefix + '}';
- }
- }
+ public record InternalQueryContext(String context, int boost, boolean isPrefix) {}
}
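
The one-line record above can replace the deleted class because a record generates the canonical constructor, the `context()`, `boost()`, and `isPrefix()` accessors, and value-based `equals`/`hashCode`/`toString` automatically; the only observable difference is the generated `toString` format, and callers such as `ContextMappings` below switch from field access (`context.boost`) to accessor calls (`context.boost()`). A rough sketch of the equivalence, with a hypothetical name for the expanded form:

    // The record declaration
    record InternalQueryContext(String context, int boost, boolean isPrefix) {}

    // behaves like this hand-written value class (constructor and accessors
    // shown; equals/hashCode/toString are also generated for the record):
    final class InternalQueryContextExpanded {
        private final String context;
        private final int boost;
        private final boolean isPrefix;

        InternalQueryContextExpanded(String context, int boost, boolean isPrefix) {
            this.context = context;
            this.boost = boost;
            this.isPrefix = isPrefix;
        }

        String context() { return context; }
        int boost() { return boost; }
        boolean isPrefix() { return isPrefix; }
    }
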
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java
index 9a975fe930979..f7709d7aac911 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java
@@ -142,7 +142,7 @@ protected Iterable<CharSequence> contexts() {
if (typedContexts.isEmpty()) {
throw new IllegalArgumentException("Contexts are mandatory in context enabled completion field [" + name + "]");
}
- return new ArrayList(typedContexts);
+ return new ArrayList<>(typedContexts);
}
}
@@ -166,8 +166,8 @@ public ContextQuery toContextQuery(CompletionQuery query, Map<String, List<ContextMapping.InternalQueryContext>> queryContexts) {
List<ContextMapping.InternalQueryContext> internalQueryContext = queryContexts.get(mapping.name());
if (internalQueryContext != null) {
for (ContextMapping.InternalQueryContext context : internalQueryContext) {
- scratch.append(context.context);
- typedContextQuery.addContext(scratch.toCharsRef(), context.boost, context.isPrefix == false);
+ scratch.append(context.context());
+ typedContextQuery.addContext(scratch.toCharsRef(), context.boost(), context.isPrefix() == false);
scratch.setLength(1);
hasContext = true;
}
@@ -193,12 +193,8 @@ public Map<String, Set<String>> getNamedContexts(List<CharSequence> contexts) {
int typeId = typedContext.charAt(0);
assert typeId < contextMappings.size() : "Returned context has invalid type";
ContextMapping<?> mapping = contextMappings.get(typeId);
- Set<String> contextEntries = contextMap.get(mapping.name());
- if (contextEntries == null) {
- contextEntries = new HashSet<>();
- contextMap.put(mapping.name(), contextEntries);
- }
- contextEntries.add(typedContext.subSequence(1, typedContext.length()).toString());
+ contextMap.computeIfAbsent(mapping.name(), k -> new HashSet<>())
+ .add(typedContext.subSequence(1, typedContext.length()).toString());
}
return contextMap;
}
@@ -273,7 +269,7 @@ public int hashCode() {
@Override
public boolean equals(Object obj) {
- if (obj == null || (obj instanceof ContextMappings) == false) {
+ if ((obj instanceof ContextMappings) == false) {
return false;
}
ContextMappings other = ((ContextMappings) obj);
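
Here, and again in `PhraseSuggestionBuilder` further down, the get-check-put idiom collapses into `Map.computeIfAbsent`, which performs a single lookup and creates the value only on a miss. A self-contained sketch of the equivalence:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class ComputeIfAbsentDemo {
        public static void main(String[] args) {
            Map<String, List<String>> byName = new HashMap<>();

            // Old idiom: look up, create on miss, put back, then add.
            List<String> entries = byName.get("geo");
            if (entries == null) {
                entries = new ArrayList<>();
                byName.put("geo", entries);
            }
            entries.add("ctx-1");

            // New idiom: one call; the value is created lazily on first access.
            byName.computeIfAbsent("geo", k -> new ArrayList<>()).add("ctx-2");

            System.out.println(byName); // {geo=[ctx-1, ctx-2]}
        }
    }
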
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java
index 933d2198a2dae..2cd7a751264bd 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java
@@ -345,19 +345,6 @@ public Builder precision(String precision) {
return precision(DistanceUnit.parse(precision, DistanceUnit.METERS, DistanceUnit.METERS));
}
- /**
- * Set the precision use o make suggestions
- *
- * @param precision
- * precision value
- * @param unit
- * {@link DistanceUnit} to use
- * @return this
- */
- public Builder precision(double precision, DistanceUnit unit) {
- return precision(unit.toMeters(precision));
- }
-
/**
* Set the precision use o make suggestions
*
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java
index 61dfb0f075d34..fc29d1ed7a567 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateGenerator.java
@@ -17,15 +17,8 @@
//TODO public for tests
public abstract class CandidateGenerator {
- public abstract boolean isKnownWord(BytesRef term) throws IOException;
-
public abstract TermStats termStats(BytesRef term) throws IOException;
- public CandidateSet drawCandidates(BytesRef term) throws IOException {
- CandidateSet set = new CandidateSet(Candidate.EMPTY, createCandidate(term, true));
- return drawCandidates(set);
- }
-
public Candidate createCandidate(BytesRef term, boolean userInput) throws IOException {
return createCandidate(term, termStats(term), 1.0, userInput);
}
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java
index e379674d02eab..fdc05d12a2389 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/CandidateScorer.java
@@ -28,13 +28,13 @@ public Correction[] findBestCandiates(CandidateSet[] sets, float errorFraction,
if (sets.length == 0) {
return Correction.EMPTY;
}
- PriorityQueue<Correction> corrections = new PriorityQueue<Correction>(maxNumCorrections) {
+ PriorityQueue<Correction> corrections = new PriorityQueue<>(maxNumCorrections) {
@Override
protected boolean lessThan(Correction a, Correction b) {
return a.compareTo(b) < 0;
}
};
- int numMissspellings = 1;
+ final int numMissspellings;
if (errorFraction >= 1.0) {
numMissspellings = (int) errorFraction;
} else {
@@ -62,11 +62,11 @@ public void findCandidates(
CandidateSet current = candidates[ord];
if (ord == candidates.length - 1) {
path[ord] = current.originalTerm;
- updateTop(candidates, path, corrections, cutoffScore, pathScore + scorer.score(path, candidates, ord, gramSize));
+ updateTop(candidates, path, corrections, cutoffScore, pathScore + scorer.score(path, ord, gramSize));
if (numMissspellingsLeft > 0) {
for (int i = 0; i < current.candidates.length; i++) {
path[ord] = current.candidates[i];
- updateTop(candidates, path, corrections, cutoffScore, pathScore + scorer.score(path, candidates, ord, gramSize));
+ updateTop(candidates, path, corrections, cutoffScore, pathScore + scorer.score(path, ord, gramSize));
}
}
} else {
@@ -79,7 +79,7 @@ public void findCandidates(
numMissspellingsLeft,
corrections,
cutoffScore,
- pathScore + scorer.score(path, candidates, ord, gramSize)
+ pathScore + scorer.score(path, ord, gramSize)
);
for (int i = 0; i < current.candidates.length; i++) {
path[ord] = current.candidates[i];
@@ -90,20 +90,12 @@ public void findCandidates(
numMissspellingsLeft - 1,
corrections,
cutoffScore,
- pathScore + scorer.score(path, candidates, ord, gramSize)
+ pathScore + scorer.score(path, ord, gramSize)
);
}
} else {
path[ord] = current.originalTerm;
- findCandidates(
- candidates,
- path,
- ord + 1,
- 0,
- corrections,
- cutoffScore,
- pathScore + scorer.score(path, candidates, ord, gramSize)
- );
+ findCandidates(candidates, path, ord + 1, 0, corrections, cutoffScore, pathScore + scorer.score(path, ord, gramSize));
}
}
@@ -135,7 +127,7 @@ private void updateTop(
public double score(Candidate[] path, CandidateSet[] candidates) throws IOException {
double score = 0.0d;
for (int i = 0; i < candidates.length; i++) {
- score += scorer.score(path, candidates, i, gramSize);
+ score += scorer.score(path, i, gramSize);
}
return Math.exp(score);
}
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java
index 98143e0acf413..b95971d13c11d 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java
@@ -97,14 +97,6 @@ public DirectCandidateGenerator(
termsEnum = terms.iterator();
}
- /* (non-Javadoc)
- * @see org.elasticsearch.search.suggest.phrase.CandidateGenerator#isKnownWord(org.apache.lucene.util.BytesRef)
- */
- @Override
- public boolean isKnownWord(BytesRef term) throws IOException {
- return termStats(term).docFreq > 0;
- }
-
/* (non-Javadoc)
* @see org.elasticsearch.search.suggest.phrase.CandidateGenerator#frequency(org.apache.lucene.util.BytesRef)
*/
@@ -128,10 +120,6 @@ public TermStats internalTermStats(BytesRef term) throws IOException {
return new TermStats(0, 0);
}
- public String getField() {
- return field;
- }
-
@Override
public CandidateSet drawCandidates(CandidateSet set) throws IOException {
Candidate original = set.originalTerm;
@@ -181,15 +169,14 @@ protected BytesRef preFilter(final BytesRef term, final CharsRefBuilder spare, f
if (preFilter == null) {
return term;
}
- final BytesRefBuilder result = byteSpare;
analyze(preFilter, term, field, new TokenConsumer() {
@Override
- public void nextToken() throws IOException {
- this.fillBytesRef(result);
+ public void nextToken() {
+ this.fillBytesRef(byteSpare);
}
}, spare);
- return result.get();
+ return byteSpare.get();
}
protected void postFilter(
@@ -344,11 +331,10 @@ public boolean equals(Object obj) {
if (getClass() != obj.getClass()) return false;
Candidate other = (Candidate) obj;
if (term == null) {
- if (other.term != null) return false;
+ return other.term == null;
} else {
- if (term.equals(other.term) == false) return false;
+ return term.equals(other.term);
}
- return true;
}
/** Lower scores sort first; if scores are equal, then later (zzz) terms sort first */
@@ -364,7 +350,7 @@ public int compareTo(Candidate other) {
}
@Override
- public Candidate createCandidate(BytesRef term, TermStats termStats, double channelScore, boolean userInput) throws IOException {
+ public Candidate createCandidate(BytesRef term, TermStats termStats, double channelScore, boolean userInput) {
return new Candidate(term, termStats, channelScore, score(termStats, channelScore, sumTotalTermFreq), userInput);
}
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java
index b3cb3444d2206..a153d4de54dcb 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java
@@ -437,32 +437,24 @@ public PhraseSuggestionContext.DirectCandidateGenerator build(IndexAnalyzers ind
private static SuggestMode resolveSuggestMode(String suggestMode) {
suggestMode = suggestMode.toLowerCase(Locale.US);
- if ("missing".equals(suggestMode)) {
- return SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
- } else if ("popular".equals(suggestMode)) {
- return SuggestMode.SUGGEST_MORE_POPULAR;
- } else if ("always".equals(suggestMode)) {
- return SuggestMode.SUGGEST_ALWAYS;
- } else {
- throw new IllegalArgumentException("Illegal suggest mode " + suggestMode);
- }
+ return switch (suggestMode) {
+ case "missing" -> SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
+ case "popular" -> SuggestMode.SUGGEST_MORE_POPULAR;
+ case "always" -> SuggestMode.SUGGEST_ALWAYS;
+ default -> throw new IllegalArgumentException("Illegal suggest mode " + suggestMode);
+ };
}
static StringDistance resolveDistance(String distanceVal) {
distanceVal = distanceVal.toLowerCase(Locale.ROOT);
- if ("internal".equals(distanceVal)) {
- return DirectSpellChecker.INTERNAL_LEVENSHTEIN;
- } else if ("damerau_levenshtein".equals(distanceVal)) {
- return new LuceneLevenshteinDistance();
- } else if ("levenshtein".equals(distanceVal)) {
- return new LevenshteinDistance();
- } else if ("jaro_winkler".equals(distanceVal)) {
- return new JaroWinklerDistance();
- } else if ("ngram".equals(distanceVal)) {
- return new NGramDistance();
- } else {
- throw new IllegalArgumentException("Illegal distance option " + distanceVal);
- }
+ return switch (distanceVal) {
+ case "internal" -> DirectSpellChecker.INTERNAL_LEVENSHTEIN;
+ case "damerau_levenshtein" -> new LuceneLevenshteinDistance();
+ case "levenshtein" -> new LevenshteinDistance();
+ case "jaro_winkler" -> new JaroWinklerDistance();
+ case "ngram" -> new NGramDistance();
+ default -> throw new IllegalArgumentException("Illegal distance option " + distanceVal);
+ };
}
private static <T> void transferIfNotNull(T value, Consumer<T> consumer) {
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java
index fe85dd70b7337..a14bddd03cdec 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java
@@ -42,7 +42,7 @@ public final class Laplace extends SmoothingModel {
*/
public static final double DEFAULT_LAPLACE_ALPHA = 0.5;
- private double alpha = DEFAULT_LAPLACE_ALPHA;
+ private final double alpha;
/**
* Creates a Laplace smoothing model.
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java
index ff752a8e62985..dce063d6e655b 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java
@@ -15,7 +15,7 @@
import java.io.IOException;
final class LaplaceScorer extends WordScorer {
- private double alpha;
+ private final double alpha;
LaplaceScorer(IndexReader reader, Terms terms, String field, double realWordLikelihood, BytesRef separator, double alpha)
throws IOException {
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java
index 80ebd9e45acf8..7e804c173da9c 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/MultiCandidateGeneratorWrapper.java
@@ -19,18 +19,13 @@
public final class MultiCandidateGeneratorWrapper extends CandidateGenerator {
private final CandidateGenerator[] candidateGenerator;
- private int numCandidates;
+ private final int numCandidates;
public MultiCandidateGeneratorWrapper(int numCandidates, CandidateGenerator... candidateGenerators) {
this.candidateGenerator = candidateGenerators;
this.numCandidates = numCandidates;
}
- @Override
- public boolean isKnownWord(BytesRef term) throws IOException {
- return candidateGenerator[0].isKnownWord(term);
- }
-
@Override
public TermStats termStats(BytesRef term) throws IOException {
return candidateGenerator[0].termStats(term);
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java
index 52c15eb214da9..4400852ebbd5a 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java
@@ -159,7 +159,7 @@ public Suggestion<? extends Entry<? extends Option>> innerExecute(
return response;
}
- private static TokenStream tokenStream(Analyzer analyzer, BytesRef query, CharsRefBuilder spare, String field) throws IOException {
+ private static TokenStream tokenStream(Analyzer analyzer, BytesRef query, CharsRefBuilder spare, String field) {
spare.copyUTF8Bytes(query);
return analyzer.tokenStream(field, new CharArrayReader(spare.chars(), 0, spare.length()));
}
@@ -174,7 +174,7 @@ protected Suggestion<? extends Entry<? extends Option>> emptySuggestion(
String name,
PhraseSuggestionContext suggestion,
CharsRefBuilder spare
- ) throws IOException {
+ ) {
PhraseSuggestion phraseSuggestion = new PhraseSuggestion(name, suggestion.getSize());
spare.copyUTF8Bytes(suggestion.getText());
phraseSuggestion.addTerm(new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length()));
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java
index 2cb04b73b7f5f..1c881a9887583 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java
@@ -73,13 +73,6 @@ public Entry(StreamInput in) throws IOException {
cutoffScore = in.readDouble();
}
- /**
- * @return cutoff score for suggestions. input term score * confidence for phrase suggest, 0 otherwise
- */
- public double getCutoffScore() {
- return cutoffScore;
- }
-
@Override
protected void merge(Suggestion.Entry other) {
super.merge(other);
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java
index 23c6d9db0ce2f..10525d7dc89d9 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java
@@ -276,12 +276,7 @@ public Float confidence() {
* phrase term before the candidates are scored.
*/
public PhraseSuggestionBuilder addCandidateGenerator(CandidateGenerator generator) {
- List<CandidateGenerator> list = this.generators.get(generator.getType());
- if (list == null) {
- list = new ArrayList<>();
- this.generators.put(generator.getType(), list);
- }
- list.add(generator);
+ this.generators.computeIfAbsent(generator.getType(), k -> new ArrayList<>()).add(generator);
return this;
}
@@ -513,7 +508,7 @@ public static PhraseSuggestionBuilder fromXContent(XContentParser parser) throws
} else if (token == Token.START_ARRAY) {
if (DirectCandidateGeneratorBuilder.DIRECT_GENERATOR_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
// for now we only have a single type of generators
- while ((token = parser.nextToken()) == Token.START_OBJECT) {
+ while (parser.nextToken() == Token.START_OBJECT) {
tmpSuggestion.addCandidateGenerator(DirectCandidateGeneratorBuilder.PARSER.apply(parser, null));
}
} else {
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java
index 7c6f791c78aab..d5cd53f68b4aa 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java
@@ -48,7 +48,7 @@ class PhraseSuggestionContext extends SuggestionContext {
private BytesRef postTag;
private TemplateScript.Factory scriptFactory;
private boolean prune = DEFAULT_COLLATE_PRUNE;
- private List<DirectCandidateGenerator> generators = new ArrayList<>();
+ private final List<DirectCandidateGenerator> generators = new ArrayList<>();
private Map<String, Object> collateScriptParams = Maps.newMapWithExpectedSize(1);
private WordScorer.WordScorerFactory scorer = DEFAULT_SCORER;
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java
index fe2c2b660eb72..8557b83302d8f 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java
@@ -43,7 +43,7 @@ public final class StupidBackoff extends SmoothingModel {
private static final ParseField DISCOUNT_FIELD = new ParseField("discount");
static final ParseField PARSE_FIELD = new ParseField(NAME);
- private double discount = DEFAULT_BACKOFF_DISCOUNT;
+ private final double discount;
/**
* Creates a Stupid-Backoff smoothing model.
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java
index fd73ed841935a..a098d797e385c 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java
@@ -16,15 +16,11 @@
import org.elasticsearch.common.lucene.index.FreqTermsEnum;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidate;
-import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet;
import java.io.IOException;
//TODO public for tests
public abstract class WordScorer {
- protected final IndexReader reader;
- protected final String field;
- protected final Terms terms;
protected final long vocabluarySize;
protected final double realWordLikelihood;
protected final BytesRefBuilder spare = new BytesRefBuilder();
@@ -38,11 +34,9 @@ public WordScorer(IndexReader reader, String field, double realWordLikelihood, B
}
public WordScorer(IndexReader reader, Terms terms, String field, double realWordLikelihood, BytesRef separator) throws IOException {
- this.field = field;
if (terms == null) {
throw new IllegalArgumentException("Field: [" + field + "] does not exist");
}
- this.terms = terms;
final long vocSize = terms.getSumTotalTermFreq();
this.vocabluarySize = vocSize == -1 ? reader.maxDoc() : vocSize;
this.useTotalTermFreq = vocSize != -1;
@@ -60,7 +54,6 @@ public WordScorer(IndexReader reader, Terms terms, String field, double realWord
null,
BigArrays.NON_RECYCLING_INSTANCE
);
- this.reader = reader;
this.realWordLikelihood = realWordLikelihood;
this.separator = separator;
}
@@ -72,20 +65,20 @@ public long frequency(BytesRef term) throws IOException {
return 0;
}
- protected double channelScore(Candidate candidate, Candidate original) throws IOException {
+ protected double channelScore(Candidate candidate) {
if (candidate.stringDistance == 1.0d) {
return realWordLikelihood;
}
return candidate.stringDistance;
}
- public double score(Candidate[] path, CandidateSet[] candidateSet, int at, int gramSize) throws IOException {
+ public double score(Candidate[] path, int at, int gramSize) throws IOException {
if (at == 0 || gramSize == 1) {
- return Math.log10(channelScore(path[at], candidateSet[at].originalTerm) * scoreUnigram(path[at]));
+ return Math.log10(channelScore(path[at]) * scoreUnigram(path[at]));
} else if (at == 1 || gramSize == 2) {
- return Math.log10(channelScore(path[at], candidateSet[at].originalTerm) * scoreBigram(path[at], path[at - 1]));
+ return Math.log10(channelScore(path[at]) * scoreBigram(path[at], path[at - 1]));
} else {
- return Math.log10(channelScore(path[at], candidateSet[at].originalTerm) * scoreTrigram(path[at], path[at - 1], path[at - 2]));
+ return Math.log10(channelScore(path[at]) * scoreTrigram(path[at], path[at - 1], path[at - 2]));
}
}
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java
index 0cb7ddac471b2..ce55385dfa550 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java
@@ -34,7 +34,7 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
public static final Comparator<Suggestion.Entry.Option> SCORE = new Score();
public static final Comparator<Suggestion.Entry.Option> FREQUENCY = new Frequency();
- private SortBy sort;
+ private final SortBy sort;
public TermSuggestion(String name, int size, SortBy sort) {
super(name, size);
@@ -85,14 +85,6 @@ public int compare(Suggestion.Entry.Option first, Suggestion.Entry.Option second
}
}
- public void setSort(SortBy sort) {
- this.sort = sort;
- }
-
- public SortBy getSort() {
- return sort;
- }
-
@Override
protected Comparator<Option> sortComparator() {
return switch (sort) {
@@ -193,10 +185,6 @@ protected void mergeInto(Suggestion.Entry.Option otherOption) {
freq += ((Option) otherOption).freq;
}
- public void setFreq(int freq) {
- this.freq = freq;
- }
-
/**
* @return How often this suggested text appears in the index.
*/
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java
index 8e28c65b8ca64..5a97c3e53c11a 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java
@@ -434,7 +434,7 @@ public static TermSuggestionBuilder fromXContent(XContentParser parser) throws I
}
@Override
- public SuggestionContext build(SearchExecutionContext context) throws IOException {
+ public SuggestionContext build(SearchExecutionContext context) {
TermSuggestionContext suggestionContext = new TermSuggestionContext(context);
// copy over common settings to each suggestion builder
populateCommonFields(context, suggestionContext);
diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java
index b7f3433df62e2..5cd75adc32c5b 100644
--- a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java
@@ -234,10 +234,6 @@ public KnnSearchBuilder(StreamInput in) throws IOException {
}
}
- public String getField() {
- return field;
- }
-
public int k() {
return k;
}
diff --git a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java
index 6aafc207a707e..bb8615fb8ab48 100644
--- a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java
+++ b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java
@@ -53,10 +53,6 @@ float getSimilarity() {
return similarity;
}
- float getDocScore() {
- return docScore;
- }
-
@Override
public Query rewrite(IndexSearcher searcher) throws IOException {
Query rewrittenInnerQuery = innerKnnQuery.rewrite(searcher);
diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java
deleted file mode 100644
index 7c2dde414c8ad..0000000000000
--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotCreationException.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.snapshots;
-
-import org.elasticsearch.common.io.stream.StreamInput;
-
-import java.io.IOException;
-
-/**
- * Thrown when snapshot creation fails completely
- * TODO: Remove this class in 8.0
- * @deprecated This exception isn't thrown anymore. It's only here for BwC.
- */
-@Deprecated
-public class SnapshotCreationException extends SnapshotException {
-
- public SnapshotCreationException(StreamInput in) throws IOException {
- super(in);
- }
-}
diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java
index 9bf773d083f5f..8612b5221c77b 100644
--- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java
+++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java
@@ -362,23 +362,24 @@ protected Recycler<BytesRef> createRecycler(Settings settings, PageCacheRecycler
@Override
public void openConnection(DiscoveryNode node, ConnectionProfile profile, ActionListener<Transport.Connection> listener) {
-
- Objects.requireNonNull(profile, "connection profile cannot be null");
- if (node == null) {
- throw new ConnectTransportException(null, "can't open connection to a null node");
- }
- ConnectionProfile finalProfile = maybeOverrideConnectionProfile(profile);
- if (closeLock.readLock().tryLock() == false) {
- ensureOpen();
- assert false : "should not get here ever because close-write-lock should only be held on shutdown";
- throw new ConnectTransportException(node, "failed to acquire close-read-lock");
- }
- try {
- ensureOpen();
- initiateConnection(node, finalProfile, listener);
- } finally {
- closeLock.readLock().unlock();
- }
+ ActionListener.run(listener, l -> {
+ Objects.requireNonNull(profile, "connection profile cannot be null");
+ if (node == null) {
+ throw new ConnectTransportException(null, "can't open connection to a null node");
+ }
+ final var finalProfile = maybeOverrideConnectionProfile(profile);
+ if (closeLock.readLock().tryLock() == false) {
+ ensureOpen();
+ assert false : "should not get here ever because close-write-lock should only be held on shutdown";
+ throw new ConnectTransportException(node, "failed to acquire close-read-lock");
+ }
+ try {
+ ensureOpen();
+ initiateConnection(node, finalProfile, l);
+ } finally {
+ closeLock.readLock().unlock();
+ }
+ });
}
private void initiateConnection(DiscoveryNode node, ConnectionProfile connectionProfile, ActionListener<Transport.Connection> listener) {
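
The `ActionListener.run` wrapper changes how argument-validation failures surface: exceptions that previously propagated out of `openConnection` are now delivered to the listener's `onFailure`, so callers observe a single completion path. A simplified model of the helper's contract, using stand-in interfaces rather than the real `org.elasticsearch.action.ActionListener`:

    class ListenerRunSketch {
        interface Listener<T> {
            void onResponse(T result);
            void onFailure(Exception e);
        }

        interface CheckedConsumer<T> {
            void accept(T t) throws Exception;
        }

        // Run the body and route any thrown exception to onFailure instead of
        // letting it escape to the caller.
        static <T> void run(Listener<T> listener, CheckedConsumer<Listener<T>> body) {
            try {
                body.accept(listener);
            } catch (Exception e) {
                listener.onFailure(e);
            }
        }
    }
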
diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
index 405211423f602..abf79243b6a61 100644
--- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
+++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
@@ -693,7 +693,7 @@ public void testIds() {
ids.put(24, org.elasticsearch.search.SearchContextMissingException.class);
ids.put(25, org.elasticsearch.script.GeneralScriptException.class);
ids.put(26, null);
- ids.put(27, org.elasticsearch.snapshots.SnapshotCreationException.class);
+ ids.put(27, null); // was SnapshotCreationException
ids.put(28, null); // was DeleteFailedEngineException, deprecated in 6.0 and removed in 7.0
ids.put(29, org.elasticsearch.index.engine.DocumentMissingException.class);
ids.put(30, org.elasticsearch.snapshots.SnapshotException.class);
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java
index 2ed24e95c16d5..b0364209a811a 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java
@@ -237,6 +237,7 @@ public void run() {
assertThat(((FieldDoc) phase.sortedTopDocs().scoreDocs()[0]).fields[0], equalTo(0));
}
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101932")
public void testMinimumVersionSameAsNewVersion() throws Exception {
Version newVersion = Version.CURRENT;
Version oldVersion = VersionUtils.randomVersionBetween(
@@ -354,6 +355,7 @@ public void onResponse(SearchResponse response) {
);
}
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101932")
public void testMinimumVersionSameAsOldVersion() throws Exception {
Version newVersion = Version.CURRENT;
Version oldVersion = VersionUtils.randomVersionBetween(
@@ -500,6 +502,7 @@ public void run() {
assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO));
}
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101932")
public void testMinimumVersionShardDuringPhaseExecution() throws Exception {
Version newVersion = Version.CURRENT;
Version oldVersion = VersionUtils.randomVersionBetween(
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
index 9a76aaf6f4b79..b02dea53bc8b9 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
@@ -302,7 +302,15 @@ private void doFromXContentTestWithRandomFields(SearchResponse response, boolean
}
try (XContentParser parser = createParser(xcontentType.xContent(), mutated)) {
SearchResponse parsed = SearchResponse.fromXContent(parser);
- assertToXContentEquivalent(originalBytes, XContentHelper.toXContent(parsed, xcontentType, params, humanReadable), xcontentType);
+ try {
+ assertToXContentEquivalent(
+ originalBytes,
+ XContentHelper.toXContent(parsed, xcontentType, params, humanReadable),
+ xcontentType
+ );
+ } finally {
+ parsed.decRef();
+ }
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertNull(parser.nextToken());
}
@@ -331,25 +339,29 @@ public void testFromXContentWithFailures() throws IOException {
);
try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) {
SearchResponse parsed = SearchResponse.fromXContent(parser);
- for (int i = 0; i < parsed.getShardFailures().length; i++) {
- ShardSearchFailure parsedFailure = parsed.getShardFailures()[i];
- ShardSearchFailure originalFailure = failures[i];
- assertEquals(originalFailure.index(), parsedFailure.index());
- assertEquals(originalFailure.shard(), parsedFailure.shard());
- assertEquals(originalFailure.shardId(), parsedFailure.shardId());
- String originalMsg = originalFailure.getCause().getMessage();
- assertEquals(
- parsedFailure.getCause().getMessage(),
- "Elasticsearch exception [type=parsing_exception, reason=" + originalMsg + "]"
- );
- String nestedMsg = originalFailure.getCause().getCause().getMessage();
- assertEquals(
- parsedFailure.getCause().getCause().getMessage(),
- "Elasticsearch exception [type=illegal_argument_exception, reason=" + nestedMsg + "]"
- );
+ try {
+ for (int i = 0; i < parsed.getShardFailures().length; i++) {
+ ShardSearchFailure parsedFailure = parsed.getShardFailures()[i];
+ ShardSearchFailure originalFailure = failures[i];
+ assertEquals(originalFailure.index(), parsedFailure.index());
+ assertEquals(originalFailure.shard(), parsedFailure.shard());
+ assertEquals(originalFailure.shardId(), parsedFailure.shardId());
+ String originalMsg = originalFailure.getCause().getMessage();
+ assertEquals(
+ parsedFailure.getCause().getMessage(),
+ "Elasticsearch exception [type=parsing_exception, reason=" + originalMsg + "]"
+ );
+ String nestedMsg = originalFailure.getCause().getCause().getMessage();
+ assertEquals(
+ parsedFailure.getCause().getCause().getMessage(),
+ "Elasticsearch exception [type=illegal_argument_exception, reason=" + nestedMsg + "]"
+ );
+ }
+ assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
+ assertNull(parser.nextToken());
+ } finally {
+ parsed.decRef();
}
- assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
- assertNull(parser.nextToken());
}
}
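
The try/finally blocks added in this test follow the usual reference-counting discipline: once the test owns the parsed `SearchResponse` it must release it even when an assertion throws midway. The shape of the pattern, with a stand-in resource type:

    class RefCountedUsageSketch {
        interface RefCounted {
            void decRef();
        }

        static void withOwnedRef(RefCounted parsed) {
            try {
                // assertions and other work that may throw go here
            } finally {
                parsed.decRef(); // released on both the success and failure paths
            }
        }
    }
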
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java
new file mode 100644
index 0000000000000..c894585edd776
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.cluster.routing;
+
+import org.elasticsearch.cluster.ClusterInfo;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ESAllocationTestCase;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
+import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
+import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.repositories.IndexId;
+import org.elasticsearch.snapshots.InternalSnapshotsInfoService;
+import org.elasticsearch.snapshots.Snapshot;
+import org.elasticsearch.snapshots.SnapshotId;
+import org.elasticsearch.snapshots.SnapshotShardSizeInfo;
+
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_RESIZE_SOURCE_NAME_KEY;
+import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_RESIZE_SOURCE_UUID_KEY;
+import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize;
+import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
+import static org.hamcrest.Matchers.equalTo;
+
+public class ExpectedShardSizeEstimatorTests extends ESAllocationTestCase {
+
+ private final long defaultValue = randomLongBetween(-1, 0);
+
+ public void testShouldFallbackToDefaultValue() {
+
+ var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index("my-index"))).build();
+ var shard = newShardRouting("my-index", 0, randomIdentifier(), true, ShardRoutingState.INITIALIZING);
+
+ var allocation = createRoutingAllocation(state, ClusterInfo.EMPTY, SnapshotShardSizeInfo.EMPTY);
+
+ assertThat(getExpectedShardSize(shard, defaultValue, allocation), equalTo(defaultValue));
+ }
+
+ public void testShouldReadExpectedSizeFromClusterInfo() {
+
+ var shardSize = randomLongBetween(100, 1000);
+ var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index("my-index"))).build();
+ var shard = newShardRouting("my-index", 0, randomIdentifier(), true, ShardRoutingState.INITIALIZING);
+
+ var clusterInfo = createClusterInfo(shard, shardSize);
+ var allocation = createRoutingAllocation(state, clusterInfo, SnapshotShardSizeInfo.EMPTY);
+
+ assertThat(getExpectedShardSize(shard, defaultValue, allocation), equalTo(shardSize));
+ }
+
+ public void testShouldReadExpectedSizeWhenInitializingFromSnapshot() {
+
+ var snapshotShardSize = randomLongBetween(100, 1000);
+ var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index("my-index"))).build();
+
+ var snapshot = new Snapshot("repository", new SnapshotId("snapshot-1", "na"));
+ var indexId = new IndexId("my-index", "_na_");
+
+ var shard = newShardRouting(
+ new ShardId("my-index", "_na_", 0),
+ null,
+ true,
+ ShardRoutingState.UNASSIGNED,
+ new RecoverySource.SnapshotRecoverySource(randomUUID(), snapshot, IndexVersion.current(), indexId)
+ );
+
+ var snapshotShardSizeInfo = new SnapshotShardSizeInfo(
+ Map.of(new InternalSnapshotsInfoService.SnapshotShard(snapshot, indexId, shard.shardId()), snapshotShardSize)
+ );
+ var allocation = createRoutingAllocation(state, ClusterInfo.EMPTY, snapshotShardSizeInfo);
+
+ assertThat(getExpectedShardSize(shard, defaultValue, allocation), equalTo(snapshotShardSize));
+ }
+
+ public void testShouldReadSizeFromClonedShard() {
+
+ var sourceShardSize = randomLongBetween(100, 1000);
+ var source = newShardRouting(new ShardId("source", "_na_", 0), randomIdentifier(), true, ShardRoutingState.STARTED);
+ var target = newShardRouting(
+ new ShardId("target", "_na_", 0),
+ randomIdentifier(),
+ true,
+ ShardRoutingState.INITIALIZING,
+ RecoverySource.LocalShardsRecoverySource.INSTANCE
+ );
+
+ var state = ClusterState.builder(ClusterName.DEFAULT)
+ .metadata(
+ metadata(
+ IndexMetadata.builder("source").settings(indexSettings(IndexVersion.current(), 2, 0)),
+ IndexMetadata.builder("target")
+ .settings(
+ indexSettings(IndexVersion.current(), 1, 0) //
+ .put(INDEX_RESIZE_SOURCE_NAME_KEY, "source") //
+ .put(INDEX_RESIZE_SOURCE_UUID_KEY, "_na_")
+ )
+ )
+ )
+ .routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(source.index()).addShard(source)))
+ .build();
+
+ var clusterInfo = createClusterInfo(source, sourceShardSize);
+ var allocation = createRoutingAllocation(state, clusterInfo, SnapshotShardSizeInfo.EMPTY);
+
+ assertThat(getExpectedShardSize(target, defaultValue, allocation), equalTo(sourceShardSize));
+ }
+
+ private static RoutingAllocation createRoutingAllocation(
+ ClusterState state,
+ ClusterInfo clusterInfo,
+ SnapshotShardSizeInfo snapshotShardSizeInfo
+ ) {
+ return new RoutingAllocation(new AllocationDeciders(List.of()), state, clusterInfo, snapshotShardSizeInfo, 0);
+ }
+
+ private static IndexMetadata.Builder index(String name) {
+ return IndexMetadata.builder(name).settings(indexSettings(IndexVersion.current(), 1, 0));
+ }
+
+ private static Metadata metadata(IndexMetadata.Builder... indices) {
+ var builder = Metadata.builder();
+ for (IndexMetadata.Builder index : indices) {
+ builder.put(index.build(), false);
+ }
+ return builder.build();
+ }
+
+ private static ClusterInfo createClusterInfo(ShardRouting shard, Long size) {
+ return new ClusterInfo(
+ Map.of(),
+ Map.of(),
+ Map.of(ClusterInfo.shardIdentifierFromRouting(shard), size),
+ Map.of(),
+ Map.of(),
+ Map.of()
+ );
+ }
+}
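
The four tests above pin down the resolution order that ExpectedShardSizeEstimator.getExpectedShardSize is expected to follow. A minimal sketch of that order under a simplified signature (the real method takes a RoutingAllocation and also covers relocations and resize targets; the ClusterInfo/SnapshotShardSizeInfo accessor names are our assumption about the production code):

// Hedged sketch of the lookup order exercised above, not the real implementation.
static long expectedSize(ShardRouting shard, long defaultValue,
                         ClusterInfo clusterInfo, SnapshotShardSizeInfo snapshotSizes) {
    // 1. A size observed in ClusterInfo wins (testShouldReadExpectedSizeFromClusterInfo).
    Long observed = clusterInfo.getShardSize(shard);
    if (observed != null) {
        return observed;
    }
    // 2. A shard restoring from a snapshot uses the snapshot's shard size
    //    (testShouldReadExpectedSizeWhenInitializingFromSnapshot).
    if (shard.recoverySource() instanceof RecoverySource.SnapshotRecoverySource) {
        return snapshotSizes.getShardSize(shard, defaultValue);
    }
    // 3. A clone/shrink/split target sums its resize-source shards, read from ClusterInfo
    //    (testShouldReadSizeFromClonedShard) -- omitted here for brevity.
    // 4. Everything else falls back to the caller's default (testShouldFallbackToDefaultValue).
    return defaultValue;
}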
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java
index a99c77b0be3cb..9ccc80ae8d1b4 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java
@@ -12,37 +12,170 @@
import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.DiskUsage;
import org.elasticsearch.cluster.ESAllocationTestCase;
+import org.elasticsearch.cluster.RestoreInProgress;
import org.elasticsearch.cluster.TestShardRoutingRoleStrategies;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.repositories.IndexId;
+import org.elasticsearch.snapshots.InternalSnapshotsInfoService;
+import org.elasticsearch.snapshots.Snapshot;
+import org.elasticsearch.snapshots.SnapshotId;
+import org.elasticsearch.snapshots.SnapshotShardSizeInfo;
+import org.elasticsearch.test.gateway.TestGatewayAllocator;
+import java.util.Collection;
+import java.util.List;
import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Function;
+import static java.util.stream.Collectors.toMap;
import static org.elasticsearch.cluster.routing.RoutingNodesHelper.shardsWithState;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase {
+
+ public void testAllocateToCorrectNodeUsingShardSizeFromClusterInfo() {
+
+ var indexMetadata = IndexMetadata.builder("test").settings(indexSettings(IndexVersion.current(), 1, 0)).build();
+
+ var clusterState = ClusterState.builder(ClusterName.DEFAULT)
+ .nodes(DiscoveryNodes.builder().add(newNode("node-1")).add(newNode("node-2")).add(newNode("node-3")))
+ .metadata(Metadata.builder().put(indexMetadata, false))
+ .routingTable(RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY).addAsNew(indexMetadata))
+ .build();
+ var dataNodeIds = clusterState.nodes().getDataNodes().keySet();
+
+ long shardSize = ByteSizeValue.ofGb(1).getBytes();
+ long diskSize = ByteSizeValue.ofGb(5).getBytes();
+ long headRoom = diskSize / 10;
+ var expectedNodeId = randomFrom(dataNodeIds);
+ var clusterInfo = createClusterInfo(
+ createDiskUsage(
+ dataNodeIds,
+ nodeId -> createDiskUsage(nodeId, diskSize, headRoom + shardSize + (Objects.equals(nodeId, expectedNodeId) ? +1 : -1))
+ ),
+ Map.of(ClusterInfo.shardIdentifierFromRouting(new ShardId(indexMetadata.getIndex(), 0), true), shardSize)
+ );
+
+ AllocationService service = createAllocationService(Settings.EMPTY, () -> clusterInfo);
+ clusterState = service.reroute(clusterState, "reroute", ActionListener.noop());
+
+ assertThatShard(
+ clusterState.routingTable().index(indexMetadata.getIndex()).shard(0).primaryShard(),
+ INITIALIZING,
+ expectedNodeId,
+ shardSize
+ );
+ }
+
+ public void testAllocateToCorrectNodeAccordingToSnapshotShardInfo() {
+
+ var snapshot = new Snapshot("repository", new SnapshotId("snapshot-1", "na"));
+ var indexId = new IndexId("my-index", "_na_");
+ var restoreId = "restore-id";
+
+ var indexMetadata = IndexMetadata.builder("test")
+ .settings(indexSettings(IndexVersion.current(), 1, 0))
+ .putInSyncAllocationIds(0, Set.of(randomUUID()))
+ .build();
+
+ var clusterState = ClusterState.builder(ClusterName.DEFAULT)
+ .nodes(DiscoveryNodes.builder().add(newNode("node-1")).add(newNode("node-2")).add(newNode("node-3")))
+ .metadata(Metadata.builder().put(indexMetadata, false))
+ .routingTable(
+ RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY)
+ .addAsRestore(
+ indexMetadata,
+ new RecoverySource.SnapshotRecoverySource(restoreId, snapshot, IndexVersion.current(), indexId)
+ )
+ )
+ .customs(
+ Map.of(
+ RestoreInProgress.TYPE,
+ new RestoreInProgress.Builder().add(
+ new RestoreInProgress.Entry(
+ restoreId,
+ snapshot,
+ RestoreInProgress.State.STARTED,
+ false,
+ List.of(indexMetadata.getIndex().getName()),
+ Map.of(new ShardId(indexMetadata.getIndex(), 0), new RestoreInProgress.ShardRestoreStatus(randomIdentifier()))
+ )
+ ).build()
+ )
+ )
+ .build();
+ var dataNodeIds = clusterState.nodes().getDataNodes().keySet();
+
+ long shardSize = ByteSizeValue.ofGb(1).getBytes();
+ long diskSize = ByteSizeValue.ofGb(5).getBytes();
+ long headRoom = diskSize / 10;
+ var expectedNodeId = randomFrom(dataNodeIds);
+ var clusterInfo = createClusterInfo(
+ createDiskUsage(
+ dataNodeIds,
+ nodeId -> createDiskUsage(nodeId, diskSize, headRoom + shardSize + (Objects.equals(nodeId, expectedNodeId) ? +1 : -1))
+ ),
+ Map.of()
+ );
+ var snapshotShardSizeInfo = new SnapshotShardSizeInfo(
+ Map.of(new InternalSnapshotsInfoService.SnapshotShard(snapshot, indexId, new ShardId(indexMetadata.getIndex(), 0)), shardSize)
+ );
+
+ AllocationService service = createAllocationService(
+ Settings.EMPTY,
+ new TestGatewayAllocator(),
+ () -> clusterInfo,
+ () -> snapshotShardSizeInfo
+ );
+ clusterState = service.reroute(clusterState, "reroute", ActionListener.noop());
+
+ assertThatShard(
+ clusterState.routingTable().index(indexMetadata.getIndex()).shard(0).primaryShard(),
+ INITIALIZING,
+ expectedNodeId,
+ shardSize
+ );
+ }
+
+ private static void assertThatShard(ShardRouting shard, ShardRoutingState state, String nodeId, long expectedShardSize) {
+ assertThat(shard.state(), equalTo(state));
+ assertThat(shard.currentNodeId(), equalTo(nodeId));
+ assertThat(shard.getExpectedShardSize(), equalTo(expectedShardSize));
+ }
+
+ private static Map<String, DiskUsage> createDiskUsage(Collection<String> nodeIds, Function<String, DiskUsage> diskUsageCreator) {
+ return nodeIds.stream().collect(toMap(Function.identity(), diskUsageCreator));
+ }
+
+ private static DiskUsage createDiskUsage(String nodeId, long totalBytes, long freeBytes) {
+ return new DiskUsage(nodeId, nodeId, "/data", totalBytes, freeBytes);
+ }
+
public void testInitializingHasExpectedSize() {
final long byteSize = randomIntBetween(0, Integer.MAX_VALUE);
final ClusterInfo clusterInfo = createClusterInfoWith(new ShardId("test", "_na_", 0), byteSize);
AllocationService strategy = createAllocationService(Settings.EMPTY, () -> clusterInfo);
logger.info("Building initial routing table");
- var indexMetadata = IndexMetadata.builder("test")
- .settings(settings(IndexVersion.current()))
- .numberOfShards(1)
- .numberOfReplicas(1)
- .build();
+ var indexMetadata = IndexMetadata.builder("test").settings(indexSettings(IndexVersion.current(), 1, 1)).build();
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
.metadata(Metadata.builder().put(indexMetadata, false))
@@ -52,11 +185,8 @@ public void testInitializingHasExpectedSize() {
logger.info("Adding one node and performing rerouting");
clusterState = strategy.reroute(clusterState, "reroute", ActionListener.noop());
- assertEquals(1, clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(ShardRoutingState.INITIALIZING));
- assertEquals(
- byteSize,
- shardsWithState(clusterState.getRoutingNodes(), ShardRoutingState.INITIALIZING).get(0).getExpectedShardSize()
- );
+ assertEquals(1, clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(INITIALIZING));
+ assertEquals(byteSize, shardsWithState(clusterState.getRoutingNodes(), INITIALIZING).get(0).getExpectedShardSize());
logger.info("Start the primary shard");
clusterState = startInitializingShardsAndReroute(strategy, clusterState);
@@ -67,11 +197,8 @@ public void testInitializingHasExpectedSize() {
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).add(newNode("node2"))).build();
clusterState = strategy.reroute(clusterState, "reroute", ActionListener.noop());
- assertEquals(1, clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(ShardRoutingState.INITIALIZING));
- assertEquals(
- byteSize,
- shardsWithState(clusterState.getRoutingNodes(), ShardRoutingState.INITIALIZING).get(0).getExpectedShardSize()
- );
+ assertEquals(1, clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(INITIALIZING));
+ assertEquals(byteSize, shardsWithState(clusterState.getRoutingNodes(), INITIALIZING).get(0).getExpectedShardSize());
}
public void testExpectedSizeOnMove() {
@@ -79,11 +206,7 @@ public void testExpectedSizeOnMove() {
final ClusterInfo clusterInfo = createClusterInfoWith(new ShardId("test", "_na_", 0), byteSize);
final AllocationService allocation = createAllocationService(Settings.EMPTY, () -> clusterInfo);
logger.info("creating an index with 1 shard, no replica");
- var indexMetadata = IndexMetadata.builder("test")
- .settings(settings(IndexVersion.current()))
- .numberOfShards(1)
- .numberOfReplicas(0)
- .build();
+ var indexMetadata = IndexMetadata.builder("test").settings(indexSettings(IndexVersion.current(), 1, 0)).build();
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
.metadata(Metadata.builder().put(indexMetadata, false))
.routingTable(RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY).addAsNew(indexMetadata))
@@ -111,7 +234,7 @@ public void testExpectedSizeOnMove() {
assertThat(commandsResult.clusterState(), not(equalTo(clusterState)));
clusterState = commandsResult.clusterState();
assertEquals(clusterState.getRoutingNodes().node(existingNodeId).iterator().next().state(), ShardRoutingState.RELOCATING);
- assertEquals(clusterState.getRoutingNodes().node(toNodeId).iterator().next().state(), ShardRoutingState.INITIALIZING);
+ assertEquals(clusterState.getRoutingNodes().node(toNodeId).iterator().next().state(), INITIALIZING);
assertEquals(clusterState.getRoutingNodes().node(existingNodeId).iterator().next().getExpectedShardSize(), byteSize);
assertEquals(clusterState.getRoutingNodes().node(toNodeId).iterator().next().getExpectedShardSize(), byteSize);
@@ -137,4 +260,8 @@ private static ClusterInfo createClusterInfoWith(ShardId shardId, long size) {
Map.of()
);
}
+
+ private static ClusterInfo createClusterInfo(Map<String, DiskUsage> diskUsage, Map<String, Long> shardSizes) {
+ return new ClusterInfo(diskUsage, diskUsage, shardSizes, Map.of(), Map.of(), Map.of());
+ }
}
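
Both new tests steer the allocator by giving every data node the same total disk but leaving exactly one byte of slack on the intended node. A worked version of that arithmetic (the diskSize / 10 head room stands in for the default high watermark; the real check lives in DiskThresholdDecider):

// Illustrative only: why exactly one node passes the disk-threshold check.
long diskSize  = ByteSizeValue.ofGb(5).getBytes();  // total disk per node
long shardSize = ByteSizeValue.ofGb(1).getBytes();  // shard being allocated
long headRoom  = diskSize / 10;                     // ~ default 90% high watermark
long freeOnExpectedNode = headRoom + shardSize + 1; // one byte over  -> accepted
long freeOnOtherNodes   = headRoom + shardSize - 1; // one byte short -> rejected
assert freeOnExpectedNode - shardSize > headRoom;
assert freeOnOtherNodes  - shardSize  < headRoom;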
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java
index 31cdb082feabc..add94e3b9344b 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java
@@ -68,6 +68,7 @@
import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
import static org.elasticsearch.common.settings.ClusterSettings.createBuiltInClusterSettings;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.not;
@@ -158,6 +159,7 @@ public ClusterState apply(ClusterState clusterState, RerouteStrategy routingAllo
clusterService,
reconcileAction
);
+ assertValidStats(desiredBalanceShardsAllocator.getStats());
var allocationService = createAllocationService(desiredBalanceShardsAllocator, createGatewayAllocator(allocateUnassigned));
allocationServiceRef.set(allocationService);
@@ -200,11 +202,21 @@ public void onFailure(Exception e) {
}
}
}
+ assertValidStats(desiredBalanceShardsAllocator.getStats());
} finally {
clusterService.close();
}
}
+ private void assertValidStats(DesiredBalanceStats stats) {
+ assertThat(stats.lastConvergedIndex(), greaterThanOrEqualTo(0L));
+ try {
+ assertEquals(stats, copyWriteable(stats, writableRegistry(), DesiredBalanceStats::readFrom));
+ } catch (Exception e) {
+ fail(e);
+ }
+ }
+
public void testShouldNotRemoveAllocationDelayMarkersOnReconcile() {
var localNode = newNode(LOCAL_NODE_ID);
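
The new assertValidStats helper also guards the wire format by round-tripping the stats through serialization. The same pattern, spelled out as a sketch (it assumes DesiredBalanceStats implements Writeable, which the readFrom reference implies):

// Serialization round-trip equivalent to copyWriteable(stats, registry, DesiredBalanceStats::readFrom).
try (BytesStreamOutput out = new BytesStreamOutput()) {
    stats.writeTo(out);
    try (StreamInput in = out.bytes().streamInput()) {
        assertEquals(stats, DesiredBalanceStats.readFrom(in));
    }
}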
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
index 41af35629790d..88c7dc24b4089 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
@@ -51,6 +51,7 @@
import java.util.Map;
import static java.util.Collections.emptySet;
+import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize;
import static org.elasticsearch.index.IndexModule.INDEX_STORE_TYPE_SETTING;
import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE;
import static org.hamcrest.Matchers.containsString;
@@ -459,9 +460,9 @@ public void testShardSizeAndRelocatingSize() {
test_2 = ShardRoutingHelper.initialize(test_2, "node1");
test_2 = ShardRoutingHelper.moveToStarted(test_2);
- assertEquals(1000L, DiskThresholdDecider.getExpectedShardSize(test_2, 0L, allocation));
- assertEquals(100L, DiskThresholdDecider.getExpectedShardSize(test_1, 0L, allocation));
- assertEquals(10L, DiskThresholdDecider.getExpectedShardSize(test_0, 0L, allocation));
+ assertEquals(1000L, getExpectedShardSize(test_2, 0L, allocation));
+ assertEquals(100L, getExpectedShardSize(test_1, 0L, allocation));
+ assertEquals(10L, getExpectedShardSize(test_0, 0L, allocation));
RoutingNode node = RoutingNodesHelper.routingNode(
"node1",
@@ -484,7 +485,7 @@ public void testShardSizeAndRelocatingSize() {
);
test_3 = ShardRoutingHelper.initialize(test_3, "node1");
test_3 = ShardRoutingHelper.moveToStarted(test_3);
- assertEquals(0L, DiskThresholdDecider.getExpectedShardSize(test_3, 0L, allocation));
+ assertEquals(0L, getExpectedShardSize(test_3, 0L, allocation));
boolean primary = randomBoolean();
ShardRouting other_0 = ShardRouting.newUnassigned(
@@ -725,10 +726,10 @@ public void testSizeShrinkIndex() {
ShardRouting.Role.DEFAULT
);
test_3 = ShardRoutingHelper.initialize(test_3, "node1");
- assertEquals(500L, DiskThresholdDecider.getExpectedShardSize(test_3, 0L, allocation));
- assertEquals(500L, DiskThresholdDecider.getExpectedShardSize(test_2, 0L, allocation));
- assertEquals(100L, DiskThresholdDecider.getExpectedShardSize(test_1, 0L, allocation));
- assertEquals(10L, DiskThresholdDecider.getExpectedShardSize(test_0, 0L, allocation));
+ assertEquals(500L, getExpectedShardSize(test_3, 0L, allocation));
+ assertEquals(500L, getExpectedShardSize(test_2, 0L, allocation));
+ assertEquals(100L, getExpectedShardSize(test_1, 0L, allocation));
+ assertEquals(10L, getExpectedShardSize(test_0, 0L, allocation));
ShardRouting target = ShardRouting.newUnassigned(
new ShardId(new Index("target", "5678"), 0),
@@ -737,7 +738,7 @@ public void testSizeShrinkIndex() {
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"),
ShardRouting.Role.DEFAULT
);
- assertEquals(1110L, DiskThresholdDecider.getExpectedShardSize(target, 0L, allocation));
+ assertEquals(1110L, getExpectedShardSize(target, 0L, allocation));
ShardRouting target2 = ShardRouting.newUnassigned(
new ShardId(new Index("target2", "9101112"), 0),
@@ -746,7 +747,7 @@ public void testSizeShrinkIndex() {
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"),
ShardRouting.Role.DEFAULT
);
- assertEquals(110L, DiskThresholdDecider.getExpectedShardSize(target2, 0L, allocation));
+ assertEquals(110L, getExpectedShardSize(target2, 0L, allocation));
target2 = ShardRouting.newUnassigned(
new ShardId(new Index("target2", "9101112"), 1),
@@ -755,7 +756,7 @@ public void testSizeShrinkIndex() {
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"),
ShardRouting.Role.DEFAULT
);
- assertEquals(1000L, DiskThresholdDecider.getExpectedShardSize(target2, 0L, allocation));
+ assertEquals(1000L, getExpectedShardSize(target2, 0L, allocation));
// check that the DiskThresholdDecider still works even if the source index has been deleted
ClusterState clusterStateWithMissingSourceIndex = ClusterState.builder(clusterState)
@@ -765,8 +766,8 @@ public void testSizeShrinkIndex() {
allocationService.reroute(clusterState, "foo", ActionListener.noop());
RoutingAllocation allocationWithMissingSourceIndex = new RoutingAllocation(null, clusterStateWithMissingSourceIndex, info, null, 0);
- assertEquals(42L, DiskThresholdDecider.getExpectedShardSize(target, 42L, allocationWithMissingSourceIndex));
- assertEquals(42L, DiskThresholdDecider.getExpectedShardSize(target2, 42L, allocationWithMissingSourceIndex));
+ assertEquals(42L, getExpectedShardSize(target, 42L, allocationWithMissingSourceIndex));
+ assertEquals(42L, getExpectedShardSize(target2, 42L, allocationWithMissingSourceIndex));
}
public void testDiskUsageWithRelocations() {
diff --git a/server/src/test/java/org/elasticsearch/health/HealthPeriodicLoggerTests.java b/server/src/test/java/org/elasticsearch/health/HealthPeriodicLoggerTests.java
index 7e77b3a4a1d73..ed18c1b0dc3d5 100644
--- a/server/src/test/java/org/elasticsearch/health/HealthPeriodicLoggerTests.java
+++ b/server/src/test/java/org/elasticsearch/health/HealthPeriodicLoggerTests.java
@@ -100,7 +100,8 @@ public void testConvertToLoggedFields() {
Map<String, Object> loggerResults = HealthPeriodicLogger.convertToLoggedFields(results);
- assertThat(loggerResults.size(), equalTo(results.size() + 1));
+ // verify that the number of fields is the number of indicators + 2 (for overall and for message)
+ assertThat(loggerResults.size(), equalTo(results.size() + 2));
// test indicator status
assertThat(loggerResults.get(makeHealthStatusString("network_latency")), equalTo("green"));
@@ -110,6 +111,12 @@ public void testConvertToLoggedFields() {
// test calculated overall status
assertThat(loggerResults.get(makeHealthStatusString("overall")), equalTo(overallStatus.xContentValue()));
+ // test calculated message
+ assertThat(
+ loggerResults.get(HealthPeriodicLogger.MESSAGE_FIELD),
+ equalTo(String.format(Locale.ROOT, "health=%s", overallStatus.xContentValue()))
+ );
+
// test empty results
{
List<HealthIndicatorResult> empty = new ArrayList<>();
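
The adjusted size expectation means convertToLoggedFields now emits one entry per indicator plus two synthetic ones. A hypothetical example of the resulting map for two indicators with an overall status of yellow (the key names here are illustrative; the real ones come from makeHealthStatusString and HealthPeriodicLogger.MESSAGE_FIELD):

// Hypothetical logged fields for two indicators; size == 2 indicators + 2.
Map<String, Object> logged = Map.of(
    "elasticsearch.health.network_latency.status", "green",
    "elasticsearch.health.disk.status", "yellow",
    "elasticsearch.health.overall.status", "yellow",  // calculated overall status
    "elasticsearch.health.message", "health=yellow"   // MESSAGE_FIELD entry
);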
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index cd04f81f0f355..4a2d74016615e 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -21,6 +21,7 @@
import java.io.IOException;
import java.time.Instant;
+import java.util.stream.Stream;
import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MAX_DIMS_COUNT;
import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MIN_DIMS_FOR_DYNAMIC_FLOAT_MAPPING;
@@ -991,4 +992,15 @@ public void testDefaultDenseVectorMappingsObject() throws IOException {
ObjectMapper parent = (ObjectMapper) update.getRoot().getMapper("parent_object");
assertThat(((FieldMapper) parent.getMapper("mapsToDenseVector")).fieldType().typeName(), equalTo("dense_vector"));
}
+
+ public void testStringArraysAreText() throws IOException {
+ DocumentMapper mapper = createDocumentMapper(topMapping(b -> b.field("numeric_detection", true)));
+ BytesReference source = BytesReference.bytes(
+ XContentFactory.jsonBuilder().startObject().field("mapsToString", Stream.generate(() -> "foo").limit(129).toArray()).endObject()
+ );
+ ParsedDocument parsedDocument = mapper.parse(new SourceToParse("id", source, XContentType.JSON));
+ Mapping update = parsedDocument.dynamicMappingsUpdate();
+ assertNotNull(update);
+ assertThat(((FieldMapper) update.getRoot().getMapper("mapsToString")).fieldType().typeName(), equalTo("text"));
+ }
}
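
The 129-element array is not arbitrary: dynamic mapping promotes sufficiently long float arrays (at least MIN_DIMS_FOR_DYNAMIC_FLOAT_MAPPING elements) to dense_vector, and the test pins down that a string array past that length still maps to text. A rough sketch of the dispatch being guarded, with invented names, covering only the float and string cases:

// Invented helper, not the mapper's real code: dynamic type for an array value.
static String dynamicArrayType(List<Object> values) {
    boolean allFloats = values.stream().allMatch(v -> v instanceof Float || v instanceof Double);
    if (allFloats && values.size() >= MIN_DIMS_FOR_DYNAMIC_FLOAT_MAPPING) {
        return "dense_vector";  // long numeric arrays become vectors
    }
    // string arrays of any length stay text, which is what testStringArraysAreText asserts
    return "text";
}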
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java
index 20b9661ad0b42..40d1f2488749a 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java
@@ -573,7 +573,6 @@ public void doTestDocValueRangeQueries(NumberType type, Supplier valueSu
dir.close();
}
- @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/74057")
public void testIndexSortIntRange() throws Exception {
doTestIndexSortRangeQueries(NumberType.INTEGER, random()::nextInt);
}
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index d4560fb6d765e..9765618e05e34 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -3088,6 +3088,36 @@ public void finalizeRecovery(long globalCheckpoint, long trimAboveSeqNo, ActionL
closeShards(primary, replica);
}
+ public void testWaitForEngineListener() throws IOException {
+ Settings settings = indexSettings(IndexVersion.current(), 1, 1).build();
+ IndexMetadata metadata = IndexMetadata.builder("test").putMapping("""
+ { "properties": { "foo": { "type": "text"}}}""").settings(settings).primaryTerm(0, 1).build();
+ IndexShard primary = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, null);
+
+ AtomicBoolean called = new AtomicBoolean(false);
+ primary.waitForEngineOrClosedShard(ActionListener.running(() -> called.set(true)));
+ assertThat("listener should not have been called yet", called.get(), equalTo(false));
+
+ recoverShardFromStore(primary);
+ assertThat("listener should have been called", called.get(), equalTo(true));
+
+ closeShards(primary);
+ }
+
+ public void testWaitForClosedListener() throws IOException {
+ Settings settings = indexSettings(IndexVersion.current(), 1, 1).build();
+ IndexMetadata metadata = IndexMetadata.builder("test").putMapping("""
+ { "properties": { "foo": { "type": "text"}}}""").settings(settings).primaryTerm(0, 1).build();
+ IndexShard primary = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, null);
+
+ AtomicBoolean called = new AtomicBoolean(false);
+ primary.waitForEngineOrClosedShard(ActionListener.running(() -> called.set(true)));
+ assertThat("listener should not have been called yet", called.get(), equalTo(false));
+
+ closeShards(primary);
+ assertThat("listener should have been called", called.get(), equalTo(true));
+ }
+
public void testRecoverFromLocalShard() throws IOException {
Settings settings = indexSettings(IndexVersion.current(), 1, 1).build();
IndexMetadata metadata = IndexMetadata.builder("source")
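
Taken together, the two new tests define the contract of waitForEngineOrClosedShard: the listener fires exactly once, either when the engine becomes available after recovery or when the shard is closed first, and never earlier. A usage sketch under that contract:

// Usage sketch: proceed once the shard has an engine, without leaking the
// listener if the shard is closed before recovery completes.
indexShard.waitForEngineOrClosedShard(ActionListener.running(() -> {
    // safe point: either the engine is ready, or the shard is closed and we should bail out
}));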
diff --git a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
index ed6fe0296a4c7..310ce21e0618a 100644
--- a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
+++ b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
@@ -222,7 +222,6 @@ private void verifySortedNumeric(
missingValue,
rootDocs,
new BitSetIterator(innerDocs, 0L),
- maxDoc,
maxChildren
);
int prevRoot = -1;
@@ -610,7 +609,6 @@ private void verifySortedBinary(
missingValue,
rootDocs,
new BitSetIterator(innerDocs, 0L),
- maxDoc,
maxChildren
);
int prevRoot = -1;
diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java
index 7bade799bc8ea..d08abe5065984 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java
@@ -102,7 +102,7 @@ public static SearchHit createTestItem(XContentType xContentType, boolean withOp
Map<String, HighlightField> highlightFields = Maps.newMapWithExpectedSize(size);
for (int i = 0; i < size; i++) {
HighlightField testItem = HighlightFieldTests.createTestItem();
- highlightFields.put(testItem.getName(), testItem);
+ highlightFields.put(testItem.name(), testItem);
}
hit.highlightFields(highlightFields);
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java
index 2e410cf0a10f5..1f61f507e88a6 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java
@@ -77,7 +77,6 @@ public void testStringField() throws IOException {
/**
* Attempting to use HDRPercentileAggregation on a range field throws IllegalArgumentException
*/
- @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/42949")
public void testRangeField() throws IOException {
// Currently fails (throws ClassCast exception), but should be fixed once HDRPercentileAggregation uses the ValuesSource registry
final String fieldName = "range";
diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java
index f174ae9180522..2d081fe2e3049 100644
--- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java
+++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java
@@ -113,9 +113,9 @@ public void testSerialization() throws IOException {
}
private static HighlightField mutate(HighlightField original) {
- Text[] fragments = original.getFragments();
+ Text[] fragments = original.fragments();
if (randomBoolean()) {
- return new HighlightField(original.getName() + "_suffix", fragments);
+ return new HighlightField(original.name() + "_suffix", fragments);
} else {
if (fragments == null) {
fragments = new Text[] { new Text("field") };
@@ -123,12 +123,12 @@ private static HighlightField mutate(HighlightField original) {
fragments = Arrays.copyOf(fragments, fragments.length + 1);
fragments[fragments.length - 1] = new Text("something new");
}
- return new HighlightField(original.getName(), fragments);
+ return new HighlightField(original.name(), fragments);
}
}
private static HighlightField copy(HighlightField original) {
- return new HighlightField(original.getName(), original.getFragments());
+ return new HighlightField(original.name(), original.fragments());
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
index 3906dfe83342e..06fdee30968b9 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
@@ -459,9 +459,9 @@ public void testQueryContextParsingBasic() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1));
- assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(1));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("context1"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(false));
}
}
@@ -471,9 +471,9 @@ public void testBooleanQueryContextParsingBasic() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1));
- assertThat(internalQueryContexts.get(0).context, equalTo("true"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(1));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("true"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(false));
}
}
@@ -483,9 +483,9 @@ public void testNumberQueryContextParsingBasic() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1));
- assertThat(internalQueryContexts.get(0).context, equalTo("10"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(1));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("10"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(false));
}
}
@@ -505,12 +505,12 @@ public void testQueryContextParsingArray() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(2));
- assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(1));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false));
- assertThat(internalQueryContexts.get(1).context, equalTo("context2"));
- assertThat(internalQueryContexts.get(1).boost, equalTo(1));
- assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("context1"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(false));
+ assertThat(internalQueryContexts.get(1).context(), equalTo("context2"));
+ assertThat(internalQueryContexts.get(1).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(1).isPrefix(), equalTo(false));
}
}
@@ -520,18 +520,18 @@ public void testQueryContextParsingMixedTypeValuesArray() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(4));
- assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(1));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false));
- assertThat(internalQueryContexts.get(1).context, equalTo("context2"));
- assertThat(internalQueryContexts.get(1).boost, equalTo(1));
- assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false));
- assertThat(internalQueryContexts.get(2).context, equalTo("true"));
- assertThat(internalQueryContexts.get(2).boost, equalTo(1));
- assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false));
- assertThat(internalQueryContexts.get(3).context, equalTo("10"));
- assertThat(internalQueryContexts.get(3).boost, equalTo(1));
- assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("context1"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(false));
+ assertThat(internalQueryContexts.get(1).context(), equalTo("context2"));
+ assertThat(internalQueryContexts.get(1).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(1).isPrefix(), equalTo(false));
+ assertThat(internalQueryContexts.get(2).context(), equalTo("true"));
+ assertThat(internalQueryContexts.get(2).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(2).isPrefix(), equalTo(false));
+ assertThat(internalQueryContexts.get(3).context(), equalTo("10"));
+ assertThat(internalQueryContexts.get(3).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(3).isPrefix(), equalTo(false));
}
}
@@ -561,9 +561,9 @@ public void testQueryContextParsingObject() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1));
- assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(10));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("context1"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(10));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(true));
}
}
@@ -573,9 +573,9 @@ public void testQueryContextParsingObjectHavingBoolean() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1));
- assertThat(internalQueryContexts.get(0).context, equalTo("false"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(10));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("false"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(10));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(true));
}
}
@@ -585,9 +585,9 @@ public void testQueryContextParsingObjectHavingNumber() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1));
- assertThat(internalQueryContexts.get(0).context, equalTo("333"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(10));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("333"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(10));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(true));
}
}
@@ -618,12 +618,12 @@ public void testQueryContextParsingObjectArray() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(2));
- assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(2));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true));
- assertThat(internalQueryContexts.get(1).context, equalTo("context2"));
- assertThat(internalQueryContexts.get(1).boost, equalTo(3));
- assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("context1"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(2));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(true));
+ assertThat(internalQueryContexts.get(1).context(), equalTo("context2"));
+ assertThat(internalQueryContexts.get(1).boost(), equalTo(3));
+ assertThat(internalQueryContexts.get(1).isPrefix(), equalTo(false));
}
}
@@ -654,18 +654,18 @@ public void testQueryContextParsingMixedTypeObjectArray() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(4));
- assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(2));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true));
- assertThat(internalQueryContexts.get(1).context, equalTo("context2"));
- assertThat(internalQueryContexts.get(1).boost, equalTo(3));
- assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false));
- assertThat(internalQueryContexts.get(2).context, equalTo("true"));
- assertThat(internalQueryContexts.get(2).boost, equalTo(3));
- assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false));
- assertThat(internalQueryContexts.get(3).context, equalTo("333"));
- assertThat(internalQueryContexts.get(3).boost, equalTo(3));
- assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("context1"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(2));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(true));
+ assertThat(internalQueryContexts.get(1).context(), equalTo("context2"));
+ assertThat(internalQueryContexts.get(1).boost(), equalTo(3));
+ assertThat(internalQueryContexts.get(1).isPrefix(), equalTo(false));
+ assertThat(internalQueryContexts.get(2).context(), equalTo("true"));
+ assertThat(internalQueryContexts.get(2).boost(), equalTo(3));
+ assertThat(internalQueryContexts.get(2).isPrefix(), equalTo(false));
+ assertThat(internalQueryContexts.get(3).context(), equalTo("333"));
+ assertThat(internalQueryContexts.get(3).boost(), equalTo(3));
+ assertThat(internalQueryContexts.get(3).isPrefix(), equalTo(false));
}
}
@@ -724,18 +724,18 @@ public void testQueryContextParsingMixed() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(4));
- assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
- assertThat(internalQueryContexts.get(0).boost, equalTo(2));
- assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true));
- assertThat(internalQueryContexts.get(1).context, equalTo("context2"));
- assertThat(internalQueryContexts.get(1).boost, equalTo(1));
- assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false));
- assertThat(internalQueryContexts.get(2).context, equalTo("false"));
- assertThat(internalQueryContexts.get(2).boost, equalTo(1));
- assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false));
- assertThat(internalQueryContexts.get(3).context, equalTo("333"));
- assertThat(internalQueryContexts.get(3).boost, equalTo(2));
- assertThat(internalQueryContexts.get(3).isPrefix, equalTo(true));
+ assertThat(internalQueryContexts.get(0).context(), equalTo("context1"));
+ assertThat(internalQueryContexts.get(0).boost(), equalTo(2));
+ assertThat(internalQueryContexts.get(0).isPrefix(), equalTo(true));
+ assertThat(internalQueryContexts.get(1).context(), equalTo("context2"));
+ assertThat(internalQueryContexts.get(1).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(1).isPrefix(), equalTo(false));
+ assertThat(internalQueryContexts.get(2).context(), equalTo("false"));
+ assertThat(internalQueryContexts.get(2).boost(), equalTo(1));
+ assertThat(internalQueryContexts.get(2).isPrefix(), equalTo(false));
+ assertThat(internalQueryContexts.get(3).context(), equalTo("333"));
+ assertThat(internalQueryContexts.get(3).boost(), equalTo(2));
+ assertThat(internalQueryContexts.get(3).isPrefix(), equalTo(true));
}
}
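
The mechanical `.context` to `.context()` change in this file (and in GeoContextMappingTests below) is what converting ContextMapping.InternalQueryContext from a class with public fields into a record looks like at its call sites. A record along these lines (hedged; the real declaration may carry more members) generates the context(), boost(), and isPrefix() accessors plus equals/hashCode/toString:

// Sketch of the record shape implied by the new accessor calls.
public record InternalQueryContext(String context, int boost, boolean isPrefix) {}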
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java
index f5757910b2dc2..54700999cd037 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java
@@ -313,9 +313,9 @@ public void testParsingQueryContextBasic() throws Exception {
locations.add("ezs42e");
addNeighborsAtLevel("ezs42e", GeoContextMapping.DEFAULT_PRECISION, locations);
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
- assertThat(internalQueryContext.context, is(in(locations)));
- assertThat(internalQueryContext.boost, equalTo(1));
- assertThat(internalQueryContext.isPrefix, equalTo(false));
+ assertThat(internalQueryContext.context(), is(in(locations)));
+ assertThat(internalQueryContext.boost(), equalTo(1));
+ assertThat(internalQueryContext.isPrefix(), equalTo(false));
}
}
@@ -329,9 +329,9 @@ public void testParsingQueryContextGeoPoint() throws Exception {
locations.add("wh0n94");
addNeighborsAtLevel("wh0n94", GeoContextMapping.DEFAULT_PRECISION, locations);
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
- assertThat(internalQueryContext.context, is(in(locations)));
- assertThat(internalQueryContext.boost, equalTo(1));
- assertThat(internalQueryContext.isPrefix, equalTo(false));
+ assertThat(internalQueryContext.context(), is(in(locations)));
+ assertThat(internalQueryContext.boost(), equalTo(1));
+ assertThat(internalQueryContext.isPrefix(), equalTo(false));
}
}
@@ -357,9 +357,12 @@ public void testParsingQueryContextObject() throws Exception {
locations.add("wh0");
addNeighborsAtLevel("wh0", 3, locations);
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
- assertThat(internalQueryContext.context, is(in(locations)));
- assertThat(internalQueryContext.boost, equalTo(10));
- assertThat(internalQueryContext.isPrefix, equalTo(internalQueryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION));
+ assertThat(internalQueryContext.context(), is(in(locations)));
+ assertThat(internalQueryContext.boost(), equalTo(10));
+ assertThat(
+ internalQueryContext.isPrefix(),
+ equalTo(internalQueryContext.context().length() < GeoContextMapping.DEFAULT_PRECISION)
+ );
}
}
@@ -399,14 +402,17 @@ public void testParsingQueryContextObjectArray() throws Exception {
secondLocations.add("w5cx0");
addNeighborsAtLevel("w5cx0", 5, secondLocations);
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
- if (firstLocations.contains(internalQueryContext.context)) {
- assertThat(internalQueryContext.boost, equalTo(10));
- } else if (secondLocations.contains(internalQueryContext.context)) {
- assertThat(internalQueryContext.boost, equalTo(2));
+ if (firstLocations.contains(internalQueryContext.context())) {
+ assertThat(internalQueryContext.boost(), equalTo(10));
+ } else if (secondLocations.contains(internalQueryContext.context())) {
+ assertThat(internalQueryContext.boost(), equalTo(2));
} else {
- fail(internalQueryContext.context + " was not expected");
+ fail(internalQueryContext.context() + " was not expected");
}
- assertThat(internalQueryContext.isPrefix, equalTo(internalQueryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION));
+ assertThat(
+ internalQueryContext.isPrefix(),
+ equalTo(internalQueryContext.context().length() < GeoContextMapping.DEFAULT_PRECISION)
+ );
}
}
@@ -439,14 +445,17 @@ public void testParsingQueryContextMixed() throws Exception {
secondLocations.add("w5cx04");
addNeighborsAtLevel("w5cx04", 6, secondLocations);
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
- if (firstLocations.contains(internalQueryContext.context)) {
- assertThat(internalQueryContext.boost, equalTo(10));
- } else if (secondLocations.contains(internalQueryContext.context)) {
- assertThat(internalQueryContext.boost, equalTo(1));
+ if (firstLocations.contains(internalQueryContext.context())) {
+ assertThat(internalQueryContext.boost(), equalTo(10));
+ } else if (secondLocations.contains(internalQueryContext.context())) {
+ assertThat(internalQueryContext.boost(), equalTo(1));
} else {
- fail(internalQueryContext.context + " was not expected");
+ fail(internalQueryContext.context() + " was not expected");
}
- assertThat(internalQueryContext.isPrefix, equalTo(internalQueryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION));
+ assertThat(
+ internalQueryContext.isPrefix(),
+ equalTo(internalQueryContext.context().length() < GeoContextMapping.DEFAULT_PRECISION)
+ );
}
}
}
diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/ApmIT.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java
similarity index 97%
rename from test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/ApmIT.java
rename to test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java
index b75ce619e54ea..0c33cd4984d86 100644
--- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/ApmIT.java
+++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java
@@ -35,7 +35,7 @@
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
-public class ApmIT extends ESRestTestCase {
+public class MetricsApmIT extends ESRestTestCase {
private static final XContentProvider.FormatProvider XCONTENT = XContentProvider.provider().getJsonXContent();
@ClassRule
@@ -83,6 +83,8 @@ public void testApmIntegration() throws Exception {
Consumer<String> messageConsumer = (String message) -> {
var apmMessage = parseMap(message);
if (isElasticsearchMetric(apmMessage)) {
+ logger.info("Apm metric message received: " + message);
+
var metricset = (Map<String, Object>) apmMessage.get("metricset");
var samples = (Map<String, Object>) metricset.get("samples");
diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/RecordingApmServer.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/RecordingApmServer.java
index c3a8df2c4b150..542f8a8d7d2f2 100644
--- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/RecordingApmServer.java
+++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/RecordingApmServer.java
@@ -60,7 +60,6 @@ private Thread consumerThread() {
try {
String msg = received.poll(1L, TimeUnit.SECONDS);
if (msg != null && msg.isEmpty() == false) {
- logger.info("APM server received: " + msg);
consumer.accept(msg);
}
@@ -84,7 +83,6 @@ private void handle(HttpExchange exchange) throws IOException {
try (InputStream requestBody = exchange.getRequestBody()) {
if (requestBody != null) {
var read = readJsonMessages(requestBody);
- read.forEach(s -> logger.debug(s));
received.addAll(read);
}
}
diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java
new file mode 100644
index 0000000000000..79816114cc38f
--- /dev/null
+++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/TracesApmIT.java
@@ -0,0 +1,146 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.test.apmintegration;
+
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.elasticsearch.test.cluster.local.distribution.DistributionType;
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
+import org.elasticsearch.xcontent.spi.XContentProvider;
+import org.hamcrest.Matcher;
+import org.hamcrest.StringDescription;
+import org.junit.ClassRule;
+import org.junit.Rule;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class TracesApmIT extends ESRestTestCase {
+ private static final XContentProvider.FormatProvider XCONTENT = XContentProvider.provider().getJsonXContent();
+ final String traceIdValue = "0af7651916cd43dd8448eb211c80319c";
+ final String traceParentValue = "00-" + traceIdValue + "-b7ad6b7169203331-01";
+
+ @ClassRule
+ public static RecordingApmServer mockApmServer = new RecordingApmServer();
+
+ @Rule
+ public ElasticsearchCluster cluster = ElasticsearchCluster.local()
+ .distribution(DistributionType.INTEG_TEST)
+ .module("test-apm-integration")
+ .module("apm")
+ .setting("telemetry.metrics.enabled", "false")
+ .setting("tracing.apm.enabled", "true")
+ .setting("tracing.apm.agent.metrics_interval", "1s")
+ .setting("tracing.apm.agent.server_url", "http://127.0.0.1:" + mockApmServer.getPort())
+ .build();
+
+ @Override
+ protected String getTestRestCluster() {
+ return cluster.getHttpAddresses();
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testApmIntegration() throws Exception {
+ Set<Predicate<Map<String, Object>>> assertions = new HashSet<>(
+ Set.of(allTrue(transactionValue("name", equalTo("GET /_nodes/stats")), transactionValue("trace_id", equalTo(traceIdValue))))
+ );
+
+ CountDownLatch finished = new CountDownLatch(1);
+
+ // a consumer that removes each assertion from the set once it has matched
+ Consumer<String> messageConsumer = (String message) -> {
+ var apmMessage = parseMap(message);
+ if (isTransactionTraceMessage(apmMessage)) {
+ logger.info("Apm transaction message received: " + message);
+ assertions.removeIf(e -> e.test(apmMessage));
+ }
+
+ if (assertions.isEmpty()) {
+ finished.countDown();
+ }
+ };
+
+ mockApmServer.addMessageConsumer(messageConsumer);
+
+ Request nodeStatsRequest = new Request("GET", "/_nodes/stats");
+
+ nodeStatsRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader(Task.TRACE_PARENT_HTTP_HEADER, traceParentValue).build());
+
+ client().performRequest(nodeStatsRequest);
+
+ finished.await(30, TimeUnit.SECONDS);
+ assertThat(assertions, equalTo(Collections.emptySet()));
+ }
+
+ private boolean isTransactionTraceMessage(Map<String, Object> apmMessage) {
+ return apmMessage.containsKey("transaction");
+ }
+
+ @SuppressWarnings("unchecked")
+ private Predicate<Map<String, Object>> allTrue(Predicate<Map<String, Object>>... predicates) {
+ var allTrueTest = Arrays.stream(predicates).reduce(v -> true, Predicate::and);
+ return new Predicate<>() {
+ @Override
+ public boolean test(Map<String, Object> map) {
+ return allTrueTest.test(map);
+ }
+
+ @Override
+ public String toString() {
+ return Arrays.stream(predicates).map(p -> p.toString()).collect(Collectors.joining(" and "));
+ }
+ };
+ }
+
+ @SuppressWarnings("unchecked")
+ private <T> Predicate<Map<String, Object>> transactionValue(String path, Matcher<T> expected) {
+
+ return new Predicate<>() {
+ @Override
+ public boolean test(Map<String, Object> map) {
+ var transaction = (Map<String, Object>) map.get("transaction");
+ var value = XContentMapValues.extractValue(path, transaction);
+ return expected.matches((T) value);
+ }
+
+ @Override
+ public String toString() {
+ StringDescription matcherDescription = new StringDescription();
+ expected.describeTo(matcherDescription);
+ return path + " " + matcherDescription;
+ }
+ };
+ }
+
+ private Map<String, Object> parseMap(String message) {
+ try (XContentParser parser = XCONTENT.XContent().createParser(XContentParserConfiguration.EMPTY, message)) {
+ return parser.map();
+ } catch (IOException e) {
+ fail(e);
+ return Collections.emptyMap();
+ }
+ }
+
+}
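For readers skimming the test above: `allTrue` and `transactionValue` compose Hamcrest matchers into predicates over the parsed APM payload. A minimal sketch of how they combine; the payload below is illustrative, not a real APM message:

    Map<String, Object> apmMessage = Map.of(
        "transaction", Map.of("name", "GET /_nodes/stats", "trace_id", "0af7651916cd43dd8448eb211c80319c")
    );
    Predicate<Map<String, Object>> assertion = allTrue(
        transactionValue("name", equalTo("GET /_nodes/stats")),
        transactionValue("trace_id", equalTo("0af7651916cd43dd8448eb211c80319c"))
    );
    assertion.test(apmMessage); // true, so the message consumer drops this assertion from the set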
diff --git a/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java
index 8cbf233616c53..a9f1ab7780f7f 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java
@@ -101,7 +101,8 @@ private static Map<String, List<Object>> storedFields(StoredFieldsSpec spec, Par
*/
protected static void assertHighlights(Map<String, HighlightField> highlights, String field, String... fragments) {
assertNotNull("No highlights reported for field [" + field + "]", highlights.get(field));
- List<String> actualFragments = Arrays.stream(highlights.get(field).getFragments()).map(Text::toString).collect(Collectors.toList());
+ HighlightField highlightField = highlights.get(field);
+ List<String> actualFragments = Arrays.stream(highlightField.fragments()).map(Text::toString).collect(Collectors.toList());
List<String> expectedFragments = List.of(fragments);
assertEquals(expectedFragments, actualFragments);
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
index 4ba558407f101..3ec327f7f3332 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
@@ -217,11 +217,6 @@ public boolean sourceRequested() {
return false;
}
- @Override
- public boolean hasFetchSourceContext() {
- return false;
- }
-
@Override
public FetchSourceContext fetchSourceContext() {
return null;
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index 1e1e1f084016a..9566456a041bc 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -197,6 +197,7 @@ public enum ProductFeature {
CCR,
SHUTDOWN,
LEGACY_TEMPLATES,
+ SEARCHABLE_SNAPSHOTS
}
private static EnumSet<ProductFeature> availableFeatures;
@@ -241,6 +242,9 @@ public void initClient() throws IOException {
if (moduleName.equals("x-pack-shutdown")) {
availableFeatures.add(ProductFeature.SHUTDOWN);
}
+ if (moduleName.equals("searchable-snapshots")) {
+ availableFeatures.add(ProductFeature.SEARCHABLE_SNAPSHOTS);
+ }
if (moduleName.startsWith("serverless-")) {
serverless = true;
}
@@ -718,10 +722,11 @@ private void wipeCluster() throws Exception {
}
// Clean up searchable snapshots indices before deleting snapshots and repositories
- if (has(ProductFeature.XPACK)
- && nodeVersions.first().onOrAfter(Version.V_7_8_0)
- && preserveSearchableSnapshotsIndicesUponCompletion() == false) {
- wipeSearchableSnapshotsIndices();
+ if (has(ProductFeature.SEARCHABLE_SNAPSHOTS)) {
+ assert nodeVersions.first().onOrAfter(Version.V_7_8_0);
+ if (preserveSearchableSnapshotsIndicesUponCompletion() == false) {
+ wipeSearchableSnapshotsIndices();
+ }
}
wipeSnapshots();
@@ -962,14 +967,23 @@ private Set<String> getAllUnexpectedTemplates() throws IOException {
*/
@SuppressWarnings("unchecked")
protected void deleteAllNodeShutdownMetadata() throws IOException {
- if (has(ProductFeature.SHUTDOWN) == false || minimumNodeVersion().before(Version.V_7_15_0)) {
- // Node shutdown APIs are only present in xpack
+ if (has(ProductFeature.SHUTDOWN) == false) {
return;
}
+
Request getShutdownStatus = new Request("GET", "_nodes/shutdown");
Map<String, Object> statusResponse = responseAsMap(adminClient().performRequest(getShutdownStatus));
- List<Map<String, Object>> nodesArray = (List<Map<String, Object>>) statusResponse.get("nodes");
- List<String> nodeIds = nodesArray.stream().map(nodeShutdownMetadata -> (String) nodeShutdownMetadata.get("node_id")).toList();
+
+ Object nodesResponse = statusResponse.get("nodes");
+ final List<String> nodeIds;
+ if (nodesResponse instanceof List<?>) { // `nodes` is parsed as a List<> only if it's populated (not empty)
+ assert minimumNodeVersion().onOrAfter(Version.V_7_15_0);
+ List<Map<String, Object>> nodesArray = (List<Map<String, Object>>) nodesResponse;
+ nodeIds = nodesArray.stream().map(nodeShutdownMetadata -> (String) nodeShutdownMetadata.get("node_id")).toList();
+ } else {
+ nodeIds = List.of();
+ }
+
for (String nodeId : nodeIds) {
Request deleteRequest = new Request("DELETE", "_nodes/" + nodeId + "/shutdown");
assertOK(adminClient().performRequest(deleteRequest));
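The `instanceof` guard above is needed because an empty `GET _nodes/shutdown` response does not deserialize `nodes` as a list. A condensed sketch of the same defensive parsing; the response shapes shown are assumptions for illustration:

    // Populated: {"nodes": [{"node_id": "abc"}, ...]}; an empty response may not yield a List at all.
    Object nodesResponse = statusResponse.get("nodes");
    List<String> nodeIds = nodesResponse instanceof List<?> nodes
        ? nodes.stream().map(n -> (String) ((Map<?, ?>) n).get("node_id")).toList()
        : List.of();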
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
index f9085ec258627..ea9dd001e5ce8 100644
--- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
@@ -25,6 +25,7 @@
import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
import org.elasticsearch.cluster.node.VersionInformation;
import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -881,6 +882,31 @@ public void handleException(TransportException exp) {
assertThat(e.getCause().getCause().getMessage(), equalTo("runtime_exception: bad message !!!"));
}
+ public void testExceptionOnConnect() {
+ final var transportA = serviceA.getOriginalTransport();
+
+ final var nullProfileFuture = new PlainActionFuture<Transport.Connection>();
+ transportA.openConnection(nodeB, null, nullProfileFuture);
+ assertTrue(nullProfileFuture.isDone());
+ expectThrows(ExecutionException.class, NullPointerException.class, nullProfileFuture::get);
+
+ final var profile = ConnectionProfile.buildDefaultConnectionProfile(Settings.EMPTY);
+ final var nullNodeFuture = new PlainActionFuture<Transport.Connection>();
+ transportA.openConnection(null, profile, nullNodeFuture);
+ assertTrue(nullNodeFuture.isDone());
+ expectThrows(ExecutionException.class, ConnectTransportException.class, nullNodeFuture::get);
+
+ serviceA.stop();
+ assertEquals(Lifecycle.State.STOPPED, transportA.lifecycleState());
+ serviceA.close();
+ assertEquals(Lifecycle.State.CLOSED, transportA.lifecycleState());
+
+ final var closedTransportFuture = new PlainActionFuture<Transport.Connection>();
+ transportA.openConnection(nodeB, profile, closedTransportFuture);
+ assertTrue(closedTransportFuture.isDone());
+ expectThrows(ExecutionException.class, IllegalStateException.class, closedTransportFuture::get);
+ }
+
public void testDisconnectListener() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
TransportConnectionListener disconnectListener = new TransportConnectionListener() {
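The new `testExceptionOnConnect` relies on `PlainActionFuture` completing synchronously when `openConnection` fails inline. Stripped to its essentials, the pattern looks like this; the exception message is illustrative:

    PlainActionFuture<Transport.Connection> future = new PlainActionFuture<>();
    future.onFailure(new NullPointerException("profile must not be null")); // what a failing openConnection does
    assertTrue(future.isDone()); // completed without ever blocking
    ExecutionException e = expectThrows(ExecutionException.class, future::get); // get() wraps the original failure
    assertThat(e.getCause(), instanceOf(NullPointerException.class));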
diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
index da6786c8341cc..630ee9883ff83 100644
--- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
+++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
@@ -484,27 +484,19 @@ public void test() throws IOException {
);
final Settings globalTemplateSettings = getGlobalTemplateSettings(testCandidate.getTestSection().getSkipSection().getFeatures());
- if (globalTemplateSettings.isEmpty() == false) {
- boolean useComponentTemplate = ESRestTestCase.has(ProductFeature.LEGACY_TEMPLATES) == false;
+ if (globalTemplateSettings.isEmpty() == false && ESRestTestCase.has(ProductFeature.LEGACY_TEMPLATES)) {
final XContentBuilder template = jsonBuilder();
template.startObject();
{
template.array("index_patterns", "*");
- if (useComponentTemplate) {
- template.field("priority", 4); // relatively low priority, but hopefully uncommon enough not to conflict
- template.startObject("template");
- }
template.startObject("settings");
globalTemplateSettings.toXContent(template, ToXContent.EMPTY_PARAMS);
template.endObject();
- if (useComponentTemplate) {
- template.endObject();
- }
}
template.endObject();
- final Request request = new Request("PUT", useComponentTemplate ? "/_index_template/global" : "/_template/global");
+ final Request request = new Request("PUT", "/_template/global");
request.setJsonEntity(Strings.toString(template));
// Because not all cases have transitioned to a composable template, it's possible that
// this can overlap an installed composable template since this is a global (*)
@@ -512,9 +504,7 @@ public void test() throws IOException {
// to be permissive in this case. This can be removed once all tests use composable
// templates instead of legacy templates
RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder();
- if (useComponentTemplate == false) {
- builder.setWarningsHandler(WarningsHandler.PERMISSIVE);
- }
+ builder.setWarningsHandler(WarningsHandler.PERMISSIVE);
request.setOptions(builder.build());
adminClient().performRequest(request);
}
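With the component-template branch removed, the code above always issues a legacy template request. Roughly, it amounts to the following; the settings payload is illustrative:

    Request request = new Request("PUT", "/_template/global");
    request.setJsonEntity("""
        {"index_patterns":["*"],"settings":{"index.number_of_replicas":"0"}}
        """);
    RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
    options.setWarningsHandler(WarningsHandler.PERMISSIVE); // legacy template APIs emit deprecation warnings
    request.setOptions(options.build());
    adminClient().performRequest(request);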
diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/ResourceUtils.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/ResourceUtils.java
index 1e6a9a9998a82..b9a6edfb958f3 100644
--- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/ResourceUtils.java
+++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/ResourceUtils.java
@@ -13,20 +13,15 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import java.util.Map;
public class ResourceUtils {
public static final String APM_TEMPLATE_VERSION_VARIABLE = "xpack.apmdata.template.version";
static byte[] loadVersionedResourceUTF8(String name, int version) {
- return loadVersionedResourceUTF8(name, version, Map.of());
- }
-
- static byte[] loadVersionedResourceUTF8(String name, int version, Map<String, String> variables) {
try {
String content = loadResource(name);
- content = TemplateUtils.replaceVariables(content, String.valueOf(version), APM_TEMPLATE_VERSION_VARIABLE, variables);
+ content = TemplateUtils.replaceVariable(content, APM_TEMPLATE_VERSION_VARIABLE, String.valueOf(version));
return content.getBytes(StandardCharsets.UTF_8);
} catch (IOException e) {
throw new RuntimeException(e);
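`TemplateUtils.replaceVariable` substitutes a single placeholder, which is all these resources need now that the per-resource variables map is gone. A sketch of the substitution, assuming the usual `${...}` placeholder convention of TemplateUtils:

    String content = "{\"version\": ${xpack.apmdata.template.version}}";
    String resolved = TemplateUtils.replaceVariable(content, APM_TEMPLATE_VERSION_VARIABLE, String.valueOf(7));
    // resolved: {"version": 7}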
diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/YamlIngestPipelineConfig.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/YamlIngestPipelineConfig.java
index de1b715dd138d..938fd69f80abe 100644
--- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/YamlIngestPipelineConfig.java
+++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/YamlIngestPipelineConfig.java
@@ -31,6 +31,6 @@ public XContentType getXContentType() {
@Override
public BytesReference loadConfig() {
- return new BytesArray(loadVersionedResourceUTF8("/ingest-pipelines/" + id + ".yaml", version, variables));
+ return new BytesArray(loadVersionedResourceUTF8("/ingest-pipelines/" + id + ".yaml", version));
}
}
diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/java/org/elasticsearch/xpack/apmdata/APMYamlTestSuiteIT.java b/x-pack/plugin/apm-data/src/yamlRestTest/java/org/elasticsearch/xpack/apmdata/APMYamlTestSuiteIT.java
index 5835a41479a68..77cac16a4e90c 100644
--- a/x-pack/plugin/apm-data/src/yamlRestTest/java/org/elasticsearch/xpack/apmdata/APMYamlTestSuiteIT.java
+++ b/x-pack/plugin/apm-data/src/yamlRestTest/java/org/elasticsearch/xpack/apmdata/APMYamlTestSuiteIT.java
@@ -10,11 +10,13 @@
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import org.apache.lucene.tests.util.LuceneTestCase;
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.junit.ClassRule;
+@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101929")
public class APMYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
@ClassRule
diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java
index 31baba05c3b09..6dc940d191685 100644
--- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java
+++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java
@@ -1238,6 +1238,7 @@ public void testRemoteClusterOnlyCCSWithFailuresOnAllShards() throws Exception {
}
}
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100957")
public void testCancelViaTasksAPI() throws Exception {
Map<String, Object> testClusterInfo = setupTwoClusters();
String localIndex = (String) testClusterInfo.get("local.index");
diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java
index fc8eedbe1ca75..7eb3cca18efd0 100644
--- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java
+++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java
@@ -22,6 +22,7 @@
import org.elasticsearch.cluster.node.DiscoveryNodeFilters;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.RoutingNode;
@@ -670,7 +671,7 @@ long sizeOf(ShardRouting shard) {
}
private long getExpectedShardSize(ShardRouting shard) {
- return DiskThresholdDecider.getExpectedShardSize(shard, 0L, info, shardSizeInfo, state.metadata(), state.routingTable());
+ return ExpectedShardSizeEstimator.getExpectedShardSize(shard, 0L, info, shardSizeInfo, state.metadata(), state.routingTable());
}
long unmovableSize(String nodeId, Collection<ShardRouting> shards) {
diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java
new file mode 100644
index 0000000000000..b4830ca97938f
--- /dev/null
+++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.blobcache;
+
+import org.elasticsearch.telemetry.TelemetryProvider;
+import org.elasticsearch.telemetry.metric.LongCounter;
+import org.elasticsearch.telemetry.metric.LongHistogram;
+import org.elasticsearch.telemetry.metric.MeterRegistry;
+
+public class BlobCacheMetrics {
+ private final LongCounter cacheMissCounter;
+ private final LongHistogram cacheMissLoadTimes;
+
+ public BlobCacheMetrics(MeterRegistry meterRegistry) {
+ this(
+ meterRegistry.registerLongCounter(
+ "elasticsearch.blob_cache.miss_that_triggered_read",
+ "The number of times there was a cache miss that triggered a read from the blob store",
+ "count"
+ ),
+ meterRegistry.registerLongHistogram(
+ "elasticsearch.blob_cache.cache_miss_load_times",
+ "The timing data for populating entries in the blob store resulting from a cache miss.",
+ "count"
+ )
+ );
+ }
+
+ BlobCacheMetrics(LongCounter cacheMissCounter, LongHistogram cacheMissLoadTimes) {
+ this.cacheMissCounter = cacheMissCounter;
+ this.cacheMissLoadTimes = cacheMissLoadTimes;
+ }
+
+ public static BlobCacheMetrics NOOP = new BlobCacheMetrics(TelemetryProvider.NOOP.getMeterRegistry());
+
+ public LongCounter getCacheMissCounter() {
+ return cacheMissCounter;
+ }
+
+ public LongHistogram getCacheMissLoadTimes() {
+ return cacheMissLoadTimes;
+ }
+}
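A minimal usage sketch of the new metrics class, mirroring how `SharedBlobCacheService` records a cache miss later in this diff; the no-op registry stands in for the node's real telemetry provider:

    BlobCacheMetrics metrics = new BlobCacheMetrics(TelemetryProvider.NOOP.getMeterRegistry());
    long startMillis = System.currentTimeMillis();
    // ... read the missing range from the blob store ...
    metrics.getCacheMissLoadTimes().record(System.currentTimeMillis() - startMillis);
    metrics.getCacheMissCounter().increment();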
diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java
index 2538ee613b96f..7740e500344f2 100644
--- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java
+++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java
@@ -13,6 +13,7 @@
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.RefCountingListener;
+import org.elasticsearch.blobcache.BlobCacheMetrics;
import org.elasticsearch.blobcache.BlobCacheUtils;
import org.elasticsearch.blobcache.common.ByteRange;
import org.elasticsearch.blobcache.common.SparseFileTracker;
@@ -298,8 +299,16 @@ private CacheEntry(T chunk) {
private final LongAdder evictCount = new LongAdder();
- public SharedBlobCacheService(NodeEnvironment environment, Settings settings, ThreadPool threadPool, String ioExecutor) {
- this(environment, settings, threadPool, ioExecutor, ioExecutor);
+ private final BlobCacheMetrics blobCacheMetrics;
+
+ public SharedBlobCacheService(
+ NodeEnvironment environment,
+ Settings settings,
+ ThreadPool threadPool,
+ String ioExecutor,
+ BlobCacheMetrics blobCacheMetrics
+ ) {
+ this(environment, settings, threadPool, ioExecutor, ioExecutor, blobCacheMetrics);
}
public SharedBlobCacheService(
@@ -307,7 +316,8 @@ public SharedBlobCacheService(
Settings settings,
ThreadPool threadPool,
String ioExecutor,
- String bulkExecutor
+ String bulkExecutor,
+ BlobCacheMetrics blobCacheMetrics
) {
this.threadPool = threadPool;
this.ioExecutor = threadPool.executor(ioExecutor);
@@ -347,6 +357,8 @@ public SharedBlobCacheService(
this.rangeSize = SHARED_CACHE_RANGE_SIZE_SETTING.get(settings);
this.recoveryRangeSize = SHARED_CACHE_RECOVERY_RANGE_SIZE_SETTING.get(settings);
+
+ this.blobCacheMetrics = blobCacheMetrics;
}
public static long calculateCacheSize(Settings settings, long totalFsSize) {
@@ -795,6 +807,20 @@ public int populateAndRead(
final RangeAvailableHandler reader,
final RangeMissingHandler writer
) throws Exception {
+ // We are interested in the total time that the system spends when fetching a result (including time spent queuing), so we start
+ // our measurement here.
+ final long startTime = threadPool.relativeTimeInMillis();
+ RangeMissingHandler writerInstrumentationDecorator = (
+ SharedBytes.IO channel,
+ int channelPos,
+ int relativePos,
+ int length,
+ IntConsumer progressUpdater) -> {
+ writer.fillCacheRange(channel, channelPos, relativePos, length, progressUpdater);
+ var elapsedTime = threadPool.relativeTimeInMillis() - startTime;
+ SharedBlobCacheService.this.blobCacheMetrics.getCacheMissLoadTimes().record(elapsedTime);
+ SharedBlobCacheService.this.blobCacheMetrics.getCacheMissCounter().increment();
+ };
if (rangeToRead.isEmpty()) {
// nothing to read, skip
return 0;
@@ -802,9 +828,9 @@ public int populateAndRead(
final int startRegion = getRegion(rangeToWrite.start());
final int endRegion = getEndingRegion(rangeToWrite.end());
if (startRegion == endRegion) {
- return readSingleRegion(rangeToWrite, rangeToRead, reader, writer, startRegion);
+ return readSingleRegion(rangeToWrite, rangeToRead, reader, writerInstrumentationDecorator, startRegion);
}
- return readMultiRegions(rangeToWrite, rangeToRead, reader, writer, startRegion, endRegion);
+ return readMultiRegions(rangeToWrite, rangeToRead, reader, writerInstrumentationDecorator, startRegion, endRegion);
}
private int readSingleRegion(
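The instrumentation added to `populateAndRead` above is a plain decorator: capture the start time up front (so queueing is included), delegate to the real writer, then record. The same pattern in isolation; the `Handler` interface here is hypothetical, invented purely for illustration:

    interface Handler {
        void fill(int length) throws IOException;
    }

    static Handler timed(Handler delegate, LongHistogram histogram, LongSupplier clock) {
        long start = clock.getAsLong(); // capture before any queueing so wait time is included
        return length -> {
            delegate.fill(length);                       // do the real work first
            histogram.record(clock.getAsLong() - start); // then record the elapsed time
        };
    }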
diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java
index 300f9f1d8efa9..cd9bb5b5934c8 100644
--- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java
+++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java
@@ -10,6 +10,7 @@
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.blobcache.BlobCacheMetrics;
import org.elasticsearch.blobcache.common.ByteRange;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.common.settings.Setting;
@@ -66,7 +67,13 @@ public void testBasicEviction() throws IOException {
final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue();
try (
NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
- var cacheService = new SharedBlobCacheService<>(environment, settings, taskQueue.getThreadPool(), ThreadPool.Names.GENERIC)
+ var cacheService = new SharedBlobCacheService<>(
+ environment,
+ settings,
+ taskQueue.getThreadPool(),
+ ThreadPool.Names.GENERIC,
+ BlobCacheMetrics.NOOP
+ )
) {
final var cacheKey = generateCacheKey();
assertEquals(5, cacheService.freeRegionCount());
@@ -126,7 +133,13 @@ public void testAutoEviction() throws IOException {
final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue();
try (
NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
- var cacheService = new SharedBlobCacheService<>(environment, settings, taskQueue.getThreadPool(), ThreadPool.Names.GENERIC)
+ var cacheService = new SharedBlobCacheService<>(
+ environment,
+ settings,
+ taskQueue.getThreadPool(),
+ ThreadPool.Names.GENERIC,
+ BlobCacheMetrics.NOOP
+ )
) {
final var cacheKey = generateCacheKey();
assertEquals(2, cacheService.freeRegionCount());
@@ -164,7 +177,13 @@ public void testForceEviction() throws IOException {
final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue();
try (
NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
- var cacheService = new SharedBlobCacheService<>(environment, settings, taskQueue.getThreadPool(), ThreadPool.Names.GENERIC)
+ var cacheService = new SharedBlobCacheService<>(
+ environment,
+ settings,
+ taskQueue.getThreadPool(),
+ ThreadPool.Names.GENERIC,
+ BlobCacheMetrics.NOOP
+ )
) {
final var cacheKey1 = generateCacheKey();
final var cacheKey2 = generateCacheKey();
@@ -192,7 +211,13 @@ public void testForceEvictResponse() throws IOException {
final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue();
try (
NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
- var cacheService = new SharedBlobCacheService<>(environment, settings, taskQueue.getThreadPool(), ThreadPool.Names.GENERIC)
+ var cacheService = new SharedBlobCacheService<>(
+ environment,
+ settings,
+ taskQueue.getThreadPool(),
+ ThreadPool.Names.GENERIC,
+ BlobCacheMetrics.NOOP
+ )
) {
final var cacheKey1 = generateCacheKey();
final var cacheKey2 = generateCacheKey();
@@ -219,7 +244,13 @@ public void testDecay() throws IOException {
final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue();
try (
NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
- var cacheService = new SharedBlobCacheService<>(environment, settings, taskQueue.getThreadPool(), ThreadPool.Names.GENERIC)
+ var cacheService = new SharedBlobCacheService<>(
+ environment,
+ settings,
+ taskQueue.getThreadPool(),
+ ThreadPool.Names.GENERIC,
+ BlobCacheMetrics.NOOP
+ )
) {
final var cacheKey1 = generateCacheKey();
final var cacheKey2 = generateCacheKey();
@@ -284,7 +315,13 @@ public void testGetMultiThreaded() throws IOException {
Set<String> files = randomSet(1, 10, () -> randomAlphaOfLength(5));
try (
NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
- var cacheService = new SharedBlobCacheService<String>(environment, settings, threadPool, ThreadPool.Names.GENERIC)
+ var cacheService = new SharedBlobCacheService<String>(
+ environment,
+ settings,
+ threadPool,
+ ThreadPool.Names.GENERIC,
+ BlobCacheMetrics.NOOP
+ )
) {
CyclicBarrier ready = new CyclicBarrier(threads);
List<Thread> threadList = IntStream.range(0, threads).mapToObj(no -> {
@@ -364,7 +401,14 @@ public void execute(Runnable command) {
try (
NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
- var cacheService = new SharedBlobCacheService<>(environment, settings, threadPool, ThreadPool.Names.GENERIC, "bulk")
+ var cacheService = new SharedBlobCacheService<>(
+ environment,
+ settings,
+ threadPool,
+ ThreadPool.Names.GENERIC,
+ "bulk",
+ BlobCacheMetrics.NOOP
+ )
) {
{
final var cacheKey = generateCacheKey();
@@ -418,7 +462,14 @@ public ExecutorService executor(String name) {
try (
NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
- var cacheService = new SharedBlobCacheService<>(environment, settings, threadPool, ThreadPool.Names.GENERIC, "bulk")
+ var cacheService = new SharedBlobCacheService<>(
+ environment,
+ settings,
+ threadPool,
+ ThreadPool.Names.GENERIC,
+ "bulk",
+ BlobCacheMetrics.NOOP
+ )
) {
final long size = size(randomIntBetween(1, 100));
@@ -620,7 +671,8 @@ public void testCacheSizeChanges() throws IOException {
environment,
settings,
taskQueue.getThreadPool(),
- ThreadPool.Names.GENERIC
+ ThreadPool.Names.GENERIC,
+ BlobCacheMetrics.NOOP
)
) {
assertEquals(val1.getBytes(), cacheService.getStats().size());
@@ -637,7 +689,8 @@ public void testCacheSizeChanges() throws IOException {
environment,
settings,
taskQueue.getThreadPool(),
- ThreadPool.Names.GENERIC
+ ThreadPool.Names.GENERIC,
+ BlobCacheMetrics.NOOP
)
) {
assertEquals(val2.getBytes(), cacheService.getStats().size());
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java
index 6f6cc6c259e34..746172fef18f3 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java
@@ -43,6 +43,7 @@
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.core.Releasables;
import org.elasticsearch.core.Streams;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.index.engine.VersionConflictEngineException;
@@ -63,7 +64,6 @@
import java.io.IOException;
import java.io.InputStream;
-import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.CharBuffer;
import java.util.Base64;
@@ -215,54 +215,15 @@ public SecurityContext getSecurityContext() {
* TODO: add limit for stored async response in EQL, and instead of this method use createResponse
*/
public void createResponseForEQL(String docId, Map<String, String> headers, R response, ActionListener<DocWriteResponse> listener) {
- try {
- final ReleasableBytesStreamOutput buffer = new ReleasableBytesStreamOutput(0, bigArrays.withCircuitBreaking());
- final XContentBuilder source = XContentFactory.jsonBuilder(buffer);
- listener = ActionListener.runBefore(listener, buffer::close);
- source.startObject()
- .field(HEADERS_FIELD, headers)
- .field(EXPIRATION_TIME_FIELD, response.getExpirationTime())
- .directFieldAsBase64(RESULT_FIELD, os -> writeResponse(response, os))
- .endObject();
-
- // do not close the buffer or the XContentBuilder until the IndexRequest is completed (i.e., listener is notified);
- // otherwise, we underestimate the memory usage in case the circuit breaker does not use the real memory usage.
- source.flush();
- final IndexRequest indexRequest = new IndexRequest(index).create(true).id(docId).source(buffer.bytes(), source.contentType());
- clientWithOrigin.index(indexRequest, listener);
- } catch (Exception e) {
- listener.onFailure(e);
- }
+ indexResponse(docId, headers, response, false, listener);
}
/**
* Stores the initial response with the original headers of the authenticated user
* and the expected expiration time.
*/
- public void createResponse(String docId, Map<String, String> headers, R response, ActionListener<DocWriteResponse> listener)
- throws IOException {
- try {
- final ReleasableBytesStreamOutput buffer = new ReleasableBytesStreamOutputWithLimit(
- 0,
- bigArrays.withCircuitBreaking(),
- maxResponseSize
- );
- final XContentBuilder source = XContentFactory.jsonBuilder(buffer);
- listener = ActionListener.runBefore(listener, buffer::close);
- source.startObject()
- .field(HEADERS_FIELD, headers)
- .field(EXPIRATION_TIME_FIELD, response.getExpirationTime())
- .directFieldAsBase64(RESULT_FIELD, os -> writeResponse(response, os))
- .endObject();
-
- // do not close the buffer or the XContentBuilder until the IndexRequest is completed (i.e., listener is notified);
- // otherwise, we underestimate the memory usage in case the circuit breaker does not use the real memory usage.
- source.flush();
- final IndexRequest indexRequest = new IndexRequest(index).create(true).id(docId).source(buffer.bytes(), source.contentType());
- clientWithOrigin.index(indexRequest, listener);
- } catch (Exception e) {
- listener.onFailure(e);
- }
+ public void createResponse(String docId, Map<String, String> headers, R response, ActionListener<DocWriteResponse> listener) {
+ indexResponse(docId, headers, response, true, listener);
}
public void updateResponse(
@@ -274,6 +235,27 @@ public void updateResponse(
updateResponse(docId, responseHeaders, response, listener, false);
}
+ private void indexResponse(
+ String docId,
+ Map<String, String> headers,
+ R response,
+ boolean limitToMaxResponseSize,
+ ActionListener<DocWriteResponse> listener
+ ) {
+ try {
+ var buffer = allocateBuffer(limitToMaxResponseSize);
+ listener = ActionListener.runBefore(listener, buffer::close);
+ final XContentBuilder source = XContentFactory.jsonBuilder(buffer)
+ .startObject()
+ .field(HEADERS_FIELD, headers)
+ .field(EXPIRATION_TIME_FIELD, response.getExpirationTime());
+ addResultFieldAndFinish(response, source);
+ clientWithOrigin.index(new IndexRequest(index).create(true).id(docId).source(buffer.bytes(), source.contentType()), listener);
+ } catch (Exception e) {
+ listener.onFailure(e);
+ }
+ }
+
/**
* Stores the final response if the place-holder document is still present (update).
*/
@@ -284,25 +266,18 @@ private void updateResponse(
ActionListener<UpdateResponse> listener,
boolean isFailure
) {
+ ReleasableBytesStreamOutput buffer = null;
try {
- final ReleasableBytesStreamOutput buffer = isFailure
- ? new ReleasableBytesStreamOutput(0, bigArrays.withCircuitBreaking())
- : new ReleasableBytesStreamOutputWithLimit(0, bigArrays.withCircuitBreaking(), maxResponseSize);
- final XContentBuilder source = XContentFactory.jsonBuilder(buffer);
- listener = ActionListener.runBefore(listener, buffer::close);
- source.startObject()
- .field(RESPONSE_HEADERS_FIELD, responseHeaders)
- .directFieldAsBase64(RESULT_FIELD, os -> writeResponse(response, os))
- .endObject();
- // do not close the buffer or the XContentBuilder until the UpdateRequest is completed (i.e., listener is notified);
- // otherwise, we underestimate the memory usage in case the circuit breaker does not use the real memory usage.
- source.flush();
- final UpdateRequest request = new UpdateRequest().index(index)
- .id(docId)
- .doc(buffer.bytes(), source.contentType())
- .retryOnConflict(5);
- clientWithOrigin.update(request, listener);
+ buffer = allocateBuffer(isFailure == false);
+ final XContentBuilder source = XContentFactory.jsonBuilder(buffer).startObject().field(RESPONSE_HEADERS_FIELD, responseHeaders);
+ addResultFieldAndFinish(response, source);
+ clientWithOrigin.update(
+ new UpdateRequest().index(index).id(docId).doc(buffer.bytes(), source.contentType()).retryOnConflict(5),
+ ActionListener.runBefore(listener, buffer::close)
+ );
} catch (Exception e) {
+ // release buffer right away to save memory, particularly in case the exception came from the circuit breaker
+ Releasables.close(buffer);
// even though we expect updating with a failure to always succeed,
// this is just an extra precaution not to create infinite loops
if (isFailure) {
@@ -311,14 +286,13 @@ private void updateResponse(
Throwable cause = ExceptionsHelper.unwrapCause(e);
if (cause instanceof DocumentMissingException == false && cause instanceof VersionConflictEngineException == false) {
logger.error(() -> "failed to store async-search [" + docId + "]", e);
- ActionListener<UpdateResponse> newListener = listener;
- updateStoredResponseWithFailure(
+ // at end, we should report a failure to the listener
+ updateResponse(
docId,
responseHeaders,
- response,
- e,
- // at end, we should report a failure to the listener
- ActionListener.running(() -> newListener.onFailure(e))
+ response.convertToFailure(e),
+ ActionListener.running(() -> listener.onFailure(e)),
+ true
);
} else {
listener.onFailure(e);
@@ -327,18 +301,29 @@ private void updateResponse(
}
}
- /**
- * Update the initial stored response with a failure
- */
- private void updateStoredResponseWithFailure(
- String docId,
- Map