diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy
new file mode 100644
index 0000000000000..4c542d371c32c
--- /dev/null
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy
@@ -0,0 +1,132 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc
+
+import org.elasticsearch.gradle.fixtures.AbstractGradleInternalPluginFuncTest
+import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin
+import org.gradle.testkit.runner.TaskOutcome
+
+class DocsTestPluginFuncTest extends AbstractGradleInternalPluginFuncTest {
+    Class pluginClassUnderTest = DocsTestPlugin.class
+
+    def setup() {
+        File docDir = new File(projectDir, 'doc');
+        docDir.mkdirs()
+        addSampleDoc(docDir)
+        buildApiRestrictionsDisabled = true
+        configurationCacheCompatible = false;
+        buildFile << """
+tasks.named('listSnippets') {
+    docs = fileTree('doc')
+}
+
+tasks.named('listConsoleCandidates') {
+    docs = fileTree('doc')
+}
+"""
+    }
+
+    def "can list snippets"() {
+        when:
+        def result = gradleRunner("listSnippets").build()
+        then:
+        result.task(":listSnippets").outcome == TaskOutcome.SUCCESS
+        assertOutputContains(result.output, """
+> Task :listSnippets
+mapper-annotated-text.asciidoc[37:39](Painless)
+mapper-annotated-text.asciidoc[42:44](js)
+mapper-annotated-text.asciidoc[51:69](console)// TEST[setup:seats]
+""")
+    }
+
+    def "can list console candidates"() {
+        when:
+        def result = gradleRunner("listConsoleCandidates").build()
+        then:
+        result.task(":listConsoleCandidates").outcome == TaskOutcome.SUCCESS
+        assertOutputContains(result.output, """
+> Task :listConsoleCandidates
+mapper-annotated-text.asciidoc[42:44](js)
+""")
+    }
+
+    void addSampleDoc(File docFolder) {
+        new File(docFolder, "mapper-annotated-text.asciidoc").text = """
+[[painless-filter-context]]
+=== Filter context
+
+Use a Painless script as a {ref}/query-dsl-script-query.html[filter] in a
+query to include and exclude documents.
+
+
+*Variables*
+
+`params` (`Map`, read-only)::
+        User-defined parameters passed in as part of the query.
+
+`doc` (`Map`, read-only)::
+        Contains the fields of the current document where each field is a
+        `List` of values.
+
+*Return*
+
+`boolean`::
+        Return `true` if the current document should be returned as a result of
+        the query, and `false` otherwise.
+
+
+*API*
+
+The standard <<painless-api-reference-shared, Painless API>> is available.
+
+*Example*
+
+To run this example, first follow the steps in
+<<painless-context-examples, context examples>>.
+
+This script finds all unsold documents that cost less than \$25.
+
+[source,Painless]
+----
+doc['sold'].value == false && doc['cost'].value < 25
+----
+
+[source,js]
+----
+curl 'hello world'
+----
+
+Defining `cost` as a script parameter enables the cost to be configured
+in the script query request. For example, the following request finds
+all available theatre seats for evening performances that are under \$25.
+ +[source,console] +---- +GET seats/_search +{ + "query": { + "bool": { + "filter": { + "script": { + "script": { + "source": "doc['sold'].value == false && doc['cost'].value < params.cost", + "params": { + "cost": 25 + } + } + } + } + } + } +} +---- +// TEST[setup:seats] +""" + } +} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy deleted file mode 100644 index 38b4cb499eeb9..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.gradle.internal.doc - -import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.test.rest.CopyRestApiTask -import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask -import org.gradle.api.Action -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.file.Directory -import org.gradle.api.file.ProjectLayout -import org.gradle.api.internal.file.FileOperations -import org.gradle.api.provider.Provider -import org.gradle.api.tasks.TaskProvider - -import javax.inject.Inject - -/** - * Sets up tests for documentation. - */ -class DocsTestPlugin implements Plugin { - - private FileOperations fileOperations - private ProjectLayout projectLayout - - @Inject - DocsTestPlugin(FileOperations fileOperations, ProjectLayout projectLayout) { - this.projectLayout = projectLayout - this.fileOperations = fileOperations - } - - @Override - void apply(Project project) { - project.pluginManager.apply('elasticsearch.legacy-yaml-rest-test') - - String distribution = System.getProperty('tests.distribution', 'default') - // The distribution can be configured with -Dtests.distribution on the command line - project.testClusters.matching { it.name.equals("yamlRestTest") }.configureEach { testDistribution = distribution.toUpperCase() } - project.testClusters.matching { it.name.equals("yamlRestTest") }.configureEach { nameCustomization = { it.replace("yamlRestTest", "node") } } - // Docs are published separately so no need to assemble - project.tasks.named("assemble").configure {enabled = false } - Map commonDefaultSubstitutions = [ - /* These match up with the asciidoc syntax for substitutions but - * the values may differ. In particular {version} needs to resolve - * to the version being built for testing but needs to resolve to - * the last released version for docs. 
*/ - '\\{version\\}': Version.fromString(VersionProperties.elasticsearch).toString(), - '\\{version_qualified\\}': VersionProperties.elasticsearch, - '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''), - '\\{build_flavor\\}' : distribution, - '\\{build_type\\}' : OS.conditionalString().onWindows({"zip"}).onUnix({"tar"}).supply(), - ] - project.tasks.register('listSnippets', SnippetsTask) { - group 'Docs' - description 'List each snippet' - defaultSubstitutions = commonDefaultSubstitutions - perSnippet = new Action() { - @Override - void execute(SnippetsTask.Snippet snippet) { - println(snippet.toString()) - } - } - } - project.tasks.register('listConsoleCandidates', SnippetsTask) { - group 'Docs' - description - 'List snippets that probably should be marked // CONSOLE' - defaultSubstitutions = commonDefaultSubstitutions - perSnippet = new Action() { - @Override - void execute(SnippetsTask.Snippet snippet) { - if (RestTestsFromSnippetsTask.isConsoleCandidate(it)) { - println(it.toString()) - } - } - } - } - - Provider restRootDir = projectLayout.buildDirectory.dir("rest") - TaskProvider buildRestTests = project.tasks.register('buildRestTests', RestTestsFromSnippetsTask) { - defaultSubstitutions = commonDefaultSubstitutions - testRoot.convention(restRootDir) - doFirst { - getFileOperations().delete(testRoot.get()) - } - } - - // TODO: This effectively makes testRoot not customizable, which we don't do anyway atm - project.sourceSets.yamlRestTest.output.dir(restRootDir, builtBy: buildRestTests) - } -} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy deleted file mode 100644 index 81207181dc9a7..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy +++ /dev/null @@ -1,503 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.doc - -import groovy.transform.PackageScope -import org.elasticsearch.gradle.internal.doc.SnippetsTask.Snippet -import org.gradle.api.Action -import org.gradle.api.InvalidUserDataException -import org.gradle.api.file.DirectoryProperty -import org.gradle.api.internal.file.FileOperations -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.Internal -import org.gradle.api.tasks.OutputDirectory -import org.gradle.api.model.ObjectFactory - -import javax.inject.Inject; -import java.nio.file.Files -import java.nio.file.Path - -/** - * Generates REST tests for each snippet marked // TEST. - */ -abstract class RestTestsFromSnippetsTask extends SnippetsTask { - /** - * These languages aren't supported by the syntax highlighter so we - * shouldn't use them. - */ - private static final List BAD_LANGUAGES = ['json', 'javascript'] - - /** - * Test setups defined in the build instead of the docs so they can be - * shared between many doc files. - */ - @Input - Map setups = new HashMap() - - /** - * Test teardowns defined in the build instead of the docs so they can be - * shared between many doc files. 
- */ - @Input - Map teardowns = new HashMap() - - /** - * A list of files that contain snippets that *probably* should be - * converted to `// CONSOLE` but have yet to be converted. If a file is in - * this list and doesn't contain unconverted snippets this task will fail. - * If there are unconverted snippets not in this list then this task will - * fail. All files are paths relative to the docs dir. - */ - @Input - List expectedUnconvertedCandidates = [] - - /** - * Root directory of the tests being generated. To make rest tests happy - * we generate them in a testRoot which is contained in this directory. - */ - private DirectoryProperty testRoot - - @Internal - Set names = new HashSet<>() - - @Inject - abstract FileOperations getFileOperations(); - - @Inject - RestTestsFromSnippetsTask(ObjectFactory objectFactory) { - testRoot = objectFactory.directoryProperty() - TestBuilder builder = new TestBuilder() - perSnippet = new Action() { - @Override - void execute(Snippet snippet) { - builder.handleSnippet(snippet) - } - } - doLast { - builder.checkUnconverted() - builder.finishLastTest() - } - } - - /** - * Root directory containing all the files generated by this task. It is - * contained within testRoot. - */ - File outputRoot() { - return new File(testRoot.get().asFile, '/rest-api-spec/test') - } - - @OutputDirectory - DirectoryProperty getTestRoot() { - return testRoot - } -/** - * Is this snippet a candidate for conversion to `// CONSOLE`? - */ - static isConsoleCandidate(Snippet snippet) { - /* Snippets that are responses or already marked as `// CONSOLE` or - * `// NOTCONSOLE` are not candidates. */ - if (snippet.console != null || snippet.testResponse) { - return false - } - /* js snippets almost always should be marked with `// CONSOLE`. js - * snippets that shouldn't be marked `// CONSOLE`, like examples for - * js client, should always be marked with `// NOTCONSOLE`. - * - * `sh` snippets that contain `curl` almost always should be marked - * with `// CONSOLE`. In the exceptionally rare cases where they are - * not communicating with Elasticsearch, like the examples in the ec2 - * and gce discovery plugins, the snippets should be marked - * `// NOTCONSOLE`. */ - return snippet.language == 'js' || snippet.curl - } - - /** - * Certain requests should not have the shard failure check because the - * format of the response is incompatible i.e. it is not a JSON object. - */ - static shouldAddShardFailureCheck(String path) { - return path.startsWith('_cat') == false && path.startsWith('_ml/datafeeds/') == false - } - - /** - * Converts Kibana's block quoted strings into standard JSON. These - * {@code """} delimited strings can be embedded in CONSOLE and can - * contain newlines and {@code "} without the normal JSON escaping. - * This has to add it. - */ - @PackageScope - static String replaceBlockQuote(String body) { - int start = body.indexOf('"""'); - if (start < 0) { - return body - } - /* - * 1.3 is a fairly wild guess of the extra space needed to hold - * the escaped string. 
- */ - StringBuilder result = new StringBuilder((int) (body.length() * 1.3)); - int startOfNormal = 0; - while (start >= 0) { - int end = body.indexOf('"""', start + 3); - if (end < 0) { - throw new InvalidUserDataException( - "Invalid block quote starting at $start in:\n$body") - } - result.append(body.substring(startOfNormal, start)); - result.append('"'); - result.append(body.substring(start + 3, end) - .replace('"', '\\"') - .replace("\n", "\\n")); - result.append('"'); - startOfNormal = end + 3; - start = body.indexOf('"""', startOfNormal); - } - result.append(body.substring(startOfNormal)); - return result.toString(); - } - - private class TestBuilder { - private static final String SYNTAX = { - String method = /(?GET|PUT|POST|HEAD|OPTIONS|DELETE)/ - String pathAndQuery = /(?[^\n]+)/ - String badBody = /GET|PUT|POST|HEAD|OPTIONS|DELETE|startyaml|#/ - String body = /(?(?:\n(?!$badBody)[^\n]+)+)/ - String rawRequest = /(?:$method\s+$pathAndQuery$body?)/ - String yamlRequest = /(?:startyaml(?s)(?.+?)(?-s)endyaml)/ - String nonComment = /(?:$rawRequest|$yamlRequest)/ - String comment = /(?#.+)/ - /(?:$comment|$nonComment)\n+/ - }() - - /** - * The file in which we saw the last snippet that made a test. - */ - Path lastDocsPath - - /** - * The file we're building. - */ - PrintWriter current - - /** - * Files containing all snippets that *probably* should be converted - * to `// CONSOLE` but have yet to be converted. All files are paths - * relative to the docs dir. - */ - Set unconvertedCandidates = new HashSet<>() - - /** - * The last non-TESTRESPONSE snippet. - */ - Snippet previousTest - - /** - * Called each time a snippet is encountered. Tracks the snippets and - * calls buildTest to actually build the test. - */ - - void handleSnippet(Snippet snippet) { - if (RestTestsFromSnippetsTask.isConsoleCandidate(snippet)) { - unconvertedCandidates.add(snippet.path.toString() - .replace('\\', '/')) - } - if (BAD_LANGUAGES.contains(snippet.language)) { - throw new InvalidUserDataException( - "$snippet: Use `js` instead of `${snippet.language}`.") - } - if (snippet.testSetup) { - testSetup(snippet) - previousTest = snippet - return - } - if (snippet.testTearDown) { - testTearDown(snippet) - previousTest = snippet - return - } - if (snippet.testResponse || snippet.language == 'console-result') { - if (previousTest == null) { - throw new InvalidUserDataException("$snippet: No paired previous test") - } - if (previousTest.path != snippet.path) { - throw new InvalidUserDataException("$snippet: Result can't be first in file") - } - response(snippet) - return - } - if ((snippet.language == 'js') && (snippet.console)) { - throw new InvalidUserDataException( - "$snippet: Use `[source,console]` instead of `// CONSOLE`.") - } - if (snippet.test || snippet.language == 'console') { - test(snippet) - previousTest = snippet - return - } - // Must be an unmarked snippet.... - } - - private void test(Snippet test) { - setupCurrent(test) - - if (test.continued) { - /* Catch some difficult to debug errors with // TEST[continued] - * and throw a helpful error message. 
*/ - if (previousTest == null || previousTest.path != test.path) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot be on first snippet in a file: $test") - } - if (previousTest != null && previousTest.testSetup) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot immediately follow // TESTSETUP: $test") - } - if (previousTest != null && previousTest.testTearDown) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot immediately follow // TEARDOWN: $test") - } - } else { - current.println('---') - if (test.name != null && test.name.isBlank() == false) { - if(names.add(test.name) == false) { - throw new InvalidUserDataException("Duplicated snippet name '$test.name': $test") - } - current.println("\"$test.name\":") - } else { - current.println("\"line_$test.start\":") - } - /* The Elasticsearch test runner doesn't support quite a few - * constructs unless we output this skip. We don't know if - * we're going to use these constructs, but we might so we - * output the skip just in case. */ - current.println(" - skip:") - current.println(" features: ") - current.println(" - default_shards") - current.println(" - stash_in_key") - current.println(" - stash_in_path") - current.println(" - stash_path_replace") - current.println(" - warnings") - } - if (test.skip) { - if (test.continued) { - throw new InvalidUserDataException("Continued snippets " - + "can't be skipped") - } - current.println(" - always_skip") - current.println(" reason: $test.skip") - } - if (test.setup != null) { - setup(test) - } - - body(test, false) - - if (test.teardown != null) { - teardown(test) - } - } - - private void setup(final Snippet snippet) { - // insert a setup defined outside of the docs - for (final String name : snippet.setup.split(',')) { - final String setup = setups[name] - if (setup == null) { - throw new InvalidUserDataException( - "Couldn't find named setup $name for $snippet" - ) - } - current.println("# Named setup ${name}") - current.println(setup) - } - } - - private void teardown(final Snippet snippet) { - // insert a teardown defined outside of the docs - for (final String name : snippet.teardown.split(',')) { - final String teardown = teardowns[name] - if (teardown == null) { - throw new InvalidUserDataException( - "Couldn't find named teardown $name for $snippet" - ) - } - current.println("# Named teardown ${name}") - current.println(teardown) - } - } - - private void response(Snippet response) { - if (null == response.skip) { - current.println(" - match: ") - current.println(" \$body: ") - replaceBlockQuote(response.contents).eachLine { - current.println(" $it") - } - } - } - - void emitDo(String method, String pathAndQuery, String body, - String catchPart, List warnings, boolean inSetup, boolean skipShardFailures) { - def (String path, String query) = pathAndQuery.tokenize('?') - if (path == null) { - path = '' // Catch requests to the root... - } else { - path = path.replace('<', '%3C').replace('>', '%3E') - } - current.println(" - do:") - if (catchPart != null) { - current.println(" catch: $catchPart") - } - if (false == warnings.isEmpty()) { - current.println(" warnings:") - for (String warning in warnings) { - // Escape " because we're going to quote the warning - String escaped = warning.replaceAll('"', '\\\\"') - /* Quote the warning in case it starts with [ which makes - * it look too much like an array. 
*/ - current.println(" - \"$escaped\"") - } - } - current.println(" raw:") - current.println(" method: $method") - current.println(" path: \"$path\"") - if (query != null) { - for (String param: query.tokenize('&')) { - def (String name, String value) = param.tokenize('=') - if (value == null) { - value = '' - } - current.println(" $name: \"$value\"") - } - } - if (body != null) { - // Throw out the leading newline we get from parsing the body - body = body.substring(1) - // Replace """ quoted strings with valid json ones - body = replaceBlockQuote(body) - current.println(" body: |") - body.eachLine { current.println(" $it") } - } - /* Catch any shard failures. These only cause a non-200 response if - * no shard succeeds. But we need to fail the tests on all of these - * because they mean invalid syntax or broken queries or something - * else that we don't want to teach people to do. The REST test - * framework doesn't allow us to have assertions in the setup - * section so we have to skip it there. We also omit the assertion - * from APIs that don't return a JSON object - */ - if (false == inSetup && skipShardFailures == false && shouldAddShardFailureCheck(path)) { - current.println(" - is_false: _shards.failures") - } - } - - private void testSetup(Snippet snippet) { - if (lastDocsPath == snippet.path) { - throw new InvalidUserDataException("$snippet: wasn't first. TESTSETUP can only be used in the first snippet of a document.") - } - setupCurrent(snippet) - current.println('---') - current.println("setup:") - if (snippet.setup != null) { - setup(snippet) - } - body(snippet, true) - } - - private void testTearDown(Snippet snippet) { - if (previousTest != null && previousTest.testSetup == false && lastDocsPath == snippet.path) { - throw new InvalidUserDataException("$snippet must follow test setup or be first") - } - setupCurrent(snippet) - current.println('---') - current.println('teardown:') - body(snippet, true) - } - - private void body(Snippet snippet, boolean inSetup) { - parse("$snippet", snippet.contents, SYNTAX) { matcher, last -> - if (matcher.group("comment") != null) { - // Comment - return - } - String yamlRequest = matcher.group("yaml"); - if (yamlRequest != null) { - current.println(yamlRequest) - return - } - String method = matcher.group("method") - String pathAndQuery = matcher.group("pathAndQuery") - String body = matcher.group("body") - String catchPart = last ? 
snippet.catchPart : null - if (pathAndQuery.startsWith('/')) { - // Leading '/'s break the generated paths - pathAndQuery = pathAndQuery.substring(1) - } - emitDo(method, pathAndQuery, body, catchPart, snippet.warnings, - inSetup, snippet.skipShardsFailures) - } - } - - private PrintWriter setupCurrent(Snippet test) { - if (lastDocsPath == test.path) { - return - } - names.clear() - finishLastTest() - lastDocsPath = test.path - - // Make the destination file: - // Shift the path into the destination directory tree - Path dest = outputRoot().toPath().resolve(test.path) - // Replace the extension - String fileName = dest.getName(dest.nameCount - 1) - dest = dest.parent.resolve(fileName.replace('.asciidoc', '.yml')) - - // Now setup the writer - Files.createDirectories(dest.parent) - current = dest.newPrintWriter('UTF-8') - } - - void finishLastTest() { - if (current != null) { - current.close() - current = null - } - } - - void checkUnconverted() { - List listedButNotFound = [] - for (String listed : expectedUnconvertedCandidates) { - if (false == unconvertedCandidates.remove(listed)) { - listedButNotFound.add(listed) - } - } - String message = "" - if (false == listedButNotFound.isEmpty()) { - Collections.sort(listedButNotFound) - listedButNotFound = listedButNotFound.collect {' ' + it} - message += "Expected unconverted snippets but none found in:\n" - message += listedButNotFound.join("\n") - } - if (false == unconvertedCandidates.isEmpty()) { - List foundButNotListed = - new ArrayList<>(unconvertedCandidates) - Collections.sort(foundButNotListed) - foundButNotListed = foundButNotListed.collect {' ' + it} - if (false == "".equals(message)) { - message += "\n" - } - message += "Unexpected unconverted snippets:\n" - message += foundButNotListed.join("\n") - } - if (false == "".equals(message)) { - throw new InvalidUserDataException(message); - } - } - } -} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy deleted file mode 100644 index 3e4ad91024082..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy +++ /dev/null @@ -1,438 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.doc - -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.core.JsonToken - -import org.gradle.api.Action; -import org.gradle.api.DefaultTask -import org.gradle.api.InvalidUserDataException -import org.gradle.api.file.ConfigurableFileTree -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.InputFiles -import org.gradle.api.tasks.Internal -import org.gradle.api.tasks.TaskAction - -import java.nio.file.Path -import java.util.regex.Matcher - -/** - * A task which will run a closure on each snippet in the documentation. 
- */ -class SnippetsTask extends DefaultTask { - private static final String SCHAR = /(?:\\\/|[^\/])/ - private static final String SUBSTITUTION = /s\/($SCHAR+)\/($SCHAR*)\// - private static final String CATCH = /catch:\s*((?:\/[^\/]+\/)|[^ \]]+)/ - private static final String SKIP_REGEX = /skip:([^\]]+)/ - private static final String SETUP = /setup:([^ \]]+)/ - private static final String TEARDOWN = /teardown:([^ \]]+)/ - private static final String WARNING = /warning:(.+)/ - private static final String NON_JSON = /(non_json)/ - private static final String TEST_SYNTAX = - /(?:$CATCH|$SUBSTITUTION|$SKIP_REGEX|(continued)|$SETUP|$TEARDOWN|$WARNING|(skip_shard_failures)) ?/ - - /** - * Action to take on each snippet. Called with a single parameter, an - * instance of Snippet. - */ - @Internal - Action perSnippet - - /** - * The docs to scan. Defaults to every file in the directory exception the - * build.gradle file because that is appropriate for Elasticsearch's docs - * directory. - */ - @InputFiles - ConfigurableFileTree docs - - /** - * Substitutions done on every snippet's contents. - */ - @Input - Map defaultSubstitutions = [:] - - @TaskAction - void executeTask() { - /* - * Walks each line of each file, building snippets as it encounters - * the lines that make up the snippet. - */ - for (File file: docs) { - String lastLanguage - String name - int lastLanguageLine - Snippet snippet = null - StringBuilder contents = null - List substitutions = null - Closure emit = { - snippet.contents = contents.toString() - contents = null - Closure doSubstitution = { String pattern, String subst -> - /* - * $body is really common but it looks like a - * backreference so we just escape it here to make the - * tests cleaner. - */ - subst = subst.replace('$body', '\\$body') - subst = subst.replace('$_path', '\\$_path') - // \n is a new line.... - subst = subst.replace('\\n', '\n') - snippet.contents = snippet.contents.replaceAll( - pattern, subst) - } - defaultSubstitutions.each doSubstitution - if (substitutions != null) { - substitutions.each doSubstitution - substitutions = null - } - if (snippet.language == null) { - throw new InvalidUserDataException("$snippet: " - + "Snippet missing a language. This is required by " - + "Elasticsearch's doc testing infrastructure so we " - + "be sure we don't accidentally forget to test a " - + "snippet.") - } - // Try to detect snippets that contain `curl` - if (snippet.language == 'sh' || snippet.language == 'shell') { - snippet.curl = snippet.contents.contains('curl') - if (snippet.console == false && snippet.curl == false) { - throw new InvalidUserDataException("$snippet: " - + "No need for NOTCONSOLE if snippet doesn't " - + "contain `curl`.") - } - } - if (snippet.testResponse - && ('js' == snippet.language || 'console-result' == snippet.language) - && null == snippet.skip) { - String quoted = snippet.contents - // quote values starting with $ - .replaceAll(/([:,])\s*(\$[^ ,\n}]+)/, '$1 "$2"') - // quote fields starting with $ - .replaceAll(/(\$[^ ,\n}]+)\s*:/, '"$1":') - - JsonFactory jf = new JsonFactory(); - jf.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER,true); - JsonParser jsonParser; - - try { - jsonParser = jf.createParser(quoted); - while(jsonParser.isClosed() == false) { - jsonParser.nextToken(); - } - } catch (JsonParseException e) { - throw new InvalidUserDataException("Invalid json in " - + snippet.toString() + ". 
The error is:\n" + e.getMessage() + ".\n" - + "After substitutions and munging, the json looks like:\n" + quoted, e); - } - } - perSnippet.execute(snippet) - snippet = null - } - file.eachLine('UTF-8') { String line, int lineNumber -> - Matcher matcher - if (line ==~ /-{4,}\s*/) { // Four dashes looks like a snippet - if (snippet == null) { - Path path = docs.dir.toPath().relativize(file.toPath()) - snippet = new Snippet(path: path, start: lineNumber, name: name) - if (lastLanguageLine == lineNumber - 1) { - snippet.language = lastLanguage - } - name = null - } else { - snippet.end = lineNumber - } - return - } - def source = matchSource(line) - if (source.matches) { - lastLanguage = source.language - lastLanguageLine = lineNumber - name = source.name - return - } - if (line ==~ /\/\/\s*AUTOSENSE\s*/) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "AUTOSENSE has been replaced by CONSOLE.") - } - if (line ==~ /\/\/\s*CONSOLE\s*/) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "CONSOLE not paired with a snippet") - } - if (snippet.console != null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "Can't be both CONSOLE and NOTCONSOLE") - } - snippet.console = true - return - } - if (line ==~ /\/\/\s*NOTCONSOLE\s*/) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "NOTCONSOLE not paired with a snippet") - } - if (snippet.console != null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "Can't be both CONSOLE and NOTCONSOLE") - } - snippet.console = false - return - } - matcher = line =~ /\/\/\s*TEST(\[(.+)\])?\s*/ - if (matcher.matches()) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "TEST not paired with a snippet at ") - } - snippet.test = true - if (matcher.group(2) != null) { - String loc = "$file:$lineNumber" - parse(loc, matcher.group(2), TEST_SYNTAX) { - if (it.group(1) != null) { - snippet.catchPart = it.group(1) - return - } - if (it.group(2) != null) { - if (substitutions == null) { - substitutions = [] - } - substitutions.add([it.group(2), it.group(3)]) - return - } - if (it.group(4) != null) { - snippet.skip = it.group(4) - return - } - if (it.group(5) != null) { - snippet.continued = true - return - } - if (it.group(6) != null) { - snippet.setup = it.group(6) - return - } - if (it.group(7) != null) { - snippet.teardown = it.group(7) - return - } - if (it.group(8) != null) { - snippet.warnings.add(it.group(8)) - return - } - if (it.group(9) != null) { - snippet.skipShardsFailures = true - return - } - throw new InvalidUserDataException( - "Invalid test marker: $line") - } - } - return - } - matcher = line =~ /\/\/\s*TESTRESPONSE(\[(.+)\])?\s*/ - if (matcher.matches()) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "TESTRESPONSE not paired with a snippet") - } - snippet.testResponse = true - if (matcher.group(2) != null) { - if (substitutions == null) { - substitutions = [] - } - String loc = "$file:$lineNumber" - parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$NON_JSON|$SKIP_REGEX) ?/) { - if (it.group(1) != null) { - // TESTRESPONSE[s/adsf/jkl/] - substitutions.add([it.group(1), it.group(2)]) - } else if (it.group(3) != null) { - // TESTRESPONSE[non_json] - substitutions.add(['^', '/']) - substitutions.add(['\n$', '\\\\s*/']) - substitutions.add(['( +)', '$1\\\\s+']) - substitutions.add(['\n', '\\\\s*\n ']) - } else if (it.group(4) != null) { - // 
TESTRESPONSE[skip:reason] - snippet.skip = it.group(4) - } - } - } - return - } - if (line ==~ /\/\/\s*TESTSETUP\s*/) { - snippet.testSetup = true - return - } - if (line ==~ /\/\/\s*TEARDOWN\s*/) { - snippet.testTearDown = true - return - } - if (snippet == null) { - // Outside - return - } - if (snippet.end == Snippet.NOT_FINISHED) { - // Inside - if (contents == null) { - contents = new StringBuilder() - } - // We don't need the annotations - line = line.replaceAll(/<\d+>/, '') - // Nor any trailing spaces - line = line.replaceAll(/\s+$/, '') - contents.append(line).append('\n') - return - } - // Allow line continuations for console snippets within lists - if (snippet != null && line.trim() == '+') { - return - } - // Just finished - emit() - } - if (snippet != null) emit() - } - } - - static Source matchSource(String line) { - def matcher = line =~ /\["?source"?(?:\.[^,]+)?,\s*"?([-\w]+)"?(,((?!id=).)*(id="?([-\w]+)"?)?(.*))?].*/ - if(matcher.matches()){ - return new Source(matches: true, language: matcher.group(1), name: matcher.group(5)) - } - return new Source(matches: false) - } - - static class Source { - boolean matches - String language - String name - } - - static class Snippet { - static final int NOT_FINISHED = -1 - - /** - * Path to the file containing this snippet. Relative to docs.dir of the - * SnippetsTask that created it. - */ - Path path - int start - int end = NOT_FINISHED - String contents - - Boolean console = null - boolean test = false - boolean testResponse = false - boolean testSetup = false - boolean testTearDown = false - String skip = null - boolean continued = false - String language = null - String catchPart = null - String setup = null - String teardown = null - boolean curl - List warnings = new ArrayList() - boolean skipShardsFailures = false - String name - - @Override - public String toString() { - String result = "$path[$start:$end]" - if (language != null) { - result += "($language)" - } - if (console != null) { - result += console ? '// CONSOLE' : '// NOTCONSOLE' - } - if (test) { - result += '// TEST' - if (catchPart) { - result += "[catch: $catchPart]" - } - if (skip) { - result += "[skip=$skip]" - } - if (continued) { - result += '[continued]' - } - if (setup) { - result += "[setup:$setup]" - } - if (teardown) { - result += "[teardown:$teardown]" - } - for (String warning in warnings) { - result += "[warning:$warning]" - } - if (skipShardsFailures) { - result += '[skip_shard_failures]' - } - } - if (testResponse) { - result += '// TESTRESPONSE' - if (skip) { - result += "[skip=$skip]" - } - } - if (testSetup) { - result += '// TESTSETUP' - } - if (curl) { - result += '(curl)' - } - return result - } - } - - /** - * Repeatedly match the pattern to the string, calling the closure with the - * matchers each time there is a match. If there are characters that don't - * match then blow up. If the closure takes two parameters then the second - * one is "is this the last match?". - */ - protected parse(String location, String s, String pattern, Closure c) { - if (s == null) { - return // Silly null, only real stuff gets to match! 
-        }
-        Matcher m = s =~ pattern
-        int offset = 0
-        Closure extraContent = { message ->
-            StringBuilder cutOut = new StringBuilder()
-            cutOut.append(s[offset - 6..offset - 1])
-            cutOut.append('*')
-            cutOut.append(s[offset..Math.min(offset + 5, s.length() - 1)])
-            String cutOutNoNl = cutOut.toString().replace('\n', '\\n')
-            throw new InvalidUserDataException("$location: Extra content "
-                + "$message ('$cutOutNoNl') matching [$pattern]: $s")
-        }
-        while (m.find()) {
-            if (m.start() != offset) {
-                extraContent("between [$offset] and [${m.start()}]")
-            }
-            offset = m.end()
-            if (c.maximumNumberOfParameters == 1) {
-                c(m)
-            } else {
-                c(m, offset == s.length())
-            }
-        }
-        if (offset == 0) {
-            throw new InvalidUserDataException("$location: Didn't match "
-                + "$pattern: $s")
-        }
-        if (offset != s.length()) {
-            extraContent("after [$offset]")
-        }
-    }
-}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java
new file mode 100644
index 0000000000000..7b35fd29fbd1a
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java
@@ -0,0 +1,306 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import org.gradle.api.InvalidUserDataException;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.function.BiConsumer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+public class AsciidocSnippetParser implements SnippetParser {
+    public static final Pattern SNIPPET_PATTERN = Pattern.compile("-{4,}\\s*");
+
+    private static final String CATCH = "catch:\\s*((?:\\/[^\\/]+\\/)|[^ \\]]+)";
+    private static final String SKIP_REGEX = "skip:([^\\]]+)";
+    private static final String SETUP = "setup:([^ \\]]+)";
+    private static final String TEARDOWN = "teardown:([^ \\]]+)";
+    private static final String WARNING = "warning:(.+)";
+    private static final String NON_JSON = "(non_json)";
+    private static final String SCHAR = "(?:\\\\\\/|[^\\/])";
+    private static final String SUBSTITUTION = "s\\/(" + SCHAR + "+)\\/(" + SCHAR + "*)\\/";
+    private static final String TEST_SYNTAX = "(?:"
+        + CATCH
+        + "|"
+        + SUBSTITUTION
+        + "|"
+        + SKIP_REGEX
+        + "|(continued)|"
+        + SETUP
+        + "|"
+        + TEARDOWN
+        + "|"
+        + WARNING
+        + "|(skip_shard_failures)) ?";
+
+    private final Map<String, String> defaultSubstitutions;
+
+    public AsciidocSnippetParser(Map<String, String> defaultSubstitutions) {
+        this.defaultSubstitutions = defaultSubstitutions;
+    }
+
+    @Override
+    public List<Snippet> parseDoc(File rootDir, File docFile, List<Map.Entry<String, String>> substitutions) {
+        String lastLanguage = null;
+        Snippet snippet = null;
+        String name = null;
+        int lastLanguageLine = 0;
+        StringBuilder contents = null;
+        List<Snippet> snippets = new ArrayList<>();
+
+        try (Stream<String> lines = Files.lines(docFile.toPath(), StandardCharsets.UTF_8)) {
+            List<String> linesList = lines.collect(Collectors.toList());
+            for (int lineNumber = 0; lineNumber < linesList.size(); lineNumber++) {
+                String line = linesList.get(lineNumber);
+                if (SNIPPET_PATTERN.matcher(line).matches()) {
+                    if (snippet == null) {
+                        Path path = rootDir.toPath().relativize(docFile.toPath());
+                        snippet = new Snippet(path, lineNumber + 1, name);
+                        snippets.add(snippet);
+                        if (lastLanguageLine == lineNumber - 1) {
+                            snippet.language = lastLanguage;
+                        }
+                        name = null;
+                    } else {
+                        snippet.end = lineNumber + 1;
+                    }
+                    continue;
+                }
+
+                Source source = matchSource(line);
+                if (source.matches) {
+                    lastLanguage = source.language;
+                    lastLanguageLine = lineNumber;
+                    name = source.name;
+                    continue;
+                }
+                if (consoleHandled(docFile.getName(), lineNumber, line, snippet)) {
+                    continue;
+                }
+                if (testHandled(docFile.getName(), lineNumber, line, snippet, substitutions)) {
+                    continue;
+                }
+                if (testResponseHandled(docFile.getName(), lineNumber, line, snippet, substitutions)) {
+                    continue;
+                }
+                if (line.matches("\\/\\/\\s*TESTSETUP\\s*")) {
+                    snippet.testSetup = true;
+                    continue;
+                }
+                if (line.matches("\\/\\/\\s*TEARDOWN\\s*")) {
+                    snippet.testTearDown = true;
+                    continue;
+                }
+                if (snippet == null) {
+                    // Outside
+                    continue;
+                }
+                if (snippet.end == Snippet.NOT_FINISHED) {
+                    // Inside
+                    if (contents == null) {
+                        contents = new StringBuilder();
+                    }
+                    // We don't need the annotations
+                    line = line.replaceAll("<\\d+>", "");
+                    // Nor any trailing spaces
+                    line = line.replaceAll("\\s+$", "");
+                    contents.append(line).append("\n");
+                    continue;
+                }
+                // Allow line continuations for console snippets within lists
+                if (snippet != null && line.trim().equals("+")) {
+                    continue;
+                }
+                finalizeSnippet(snippet, contents.toString(), defaultSubstitutions, substitutions);
+                substitutions = new ArrayList<>();
+                snippet = null;
+                contents = null;
+            }
+            if (snippet != null) {
+                finalizeSnippet(snippet, contents.toString(), defaultSubstitutions, substitutions);
+                contents = null;
+                snippet = null;
+                substitutions = new ArrayList<>();
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        return snippets;
+    }
+
+    static Snippet finalizeSnippet(
+        final Snippet snippet,
+        String contents,
+        Map<String, String> defaultSubstitutions,
+        Collection<Map.Entry<String, String>> substitutions
+    ) {
+        snippet.contents = contents.toString();
+        snippet.validate();
+        escapeSubstitutions(snippet, defaultSubstitutions, substitutions);
+        return snippet;
+    }
+
+    private static void escapeSubstitutions(
+        Snippet snippet,
+        Map<String, String> defaultSubstitutions,
+        Collection<Map.Entry<String, String>> substitutions
+    ) {
+        BiConsumer<String, String> doSubstitution = (pattern, subst) -> {
+            /*
+             * $body is really common but it looks like a
+             * backreference so we just escape it here to make the
+             * tests cleaner.
+             */
+            subst = subst.replace("$body", "\\$body");
+            subst = subst.replace("$_path", "\\$_path");
+            subst = subst.replace("\\n", "\n");
+            snippet.contents = snippet.contents.replaceAll(pattern, subst);
+        };
+        defaultSubstitutions.forEach(doSubstitution);
+
+        if (substitutions != null) {
+            substitutions.forEach(e -> doSubstitution.accept(e.getKey(), e.getValue()));
+        }
+    }
+
+    private boolean testResponseHandled(
+        String name,
+        int lineNumber,
+        String line,
+        Snippet snippet,
+        final List<Map.Entry<String, String>> substitutions
+    ) {
+        Matcher matcher = Pattern.compile("\\/\\/\\s*TESTRESPONSE(\\[(.+)\\])?\\s*").matcher(line);
+        if (matcher.matches()) {
+            if (snippet == null) {
+                throw new InvalidUserDataException(name + ":" + lineNumber + ": TESTRESPONSE not paired with a snippet at ");
+            }
+            snippet.testResponse = true;
+            if (matcher.group(2) != null) {
+                String loc = name + ":" + lineNumber;
+                ParsingUtils.parse(
+                    loc,
+                    matcher.group(2),
+                    "(?:" + SUBSTITUTION + "|" + NON_JSON + "|" + SKIP_REGEX + ") ?",
+                    (Matcher m, Boolean last) -> {
+                        if (m.group(1) != null) {
+                            // TESTRESPONSE[s/adsf/jkl/]
+                            substitutions.add(Map.entry(m.group(1), m.group(2)));
+                        } else if (m.group(3) != null) {
+                            // TESTRESPONSE[non_json]
+                            substitutions.add(Map.entry("^", "/"));
+                            substitutions.add(Map.entry("\n$", "\\\\s*/"));
+                            substitutions.add(Map.entry("( +)", "$1\\\\s+"));
+                            substitutions.add(Map.entry("\n", "\\\\s*\n "));
+                        } else if (m.group(4) != null) {
+                            // TESTRESPONSE[skip:reason]
+                            snippet.skip = m.group(4);
+                        }
+                    }
+                );
+            }
+            return true;
+        }
+        return false;
+    }
+
+    private boolean testHandled(String name, int lineNumber, String line, Snippet snippet, List<Map.Entry<String, String>> substitutions) {
+        Matcher matcher = Pattern.compile("\\/\\/\\s*TEST(\\[(.+)\\])?\\s*").matcher(line);
+        if (matcher.matches()) {
+            if (snippet == null) {
+                throw new InvalidUserDataException(name + ":" + lineNumber + ": TEST not paired with a snippet at ");
+            }
+            snippet.test = true;
+            if (matcher.group(2) != null) {
+                String loc = name + ":" + lineNumber;
+                ParsingUtils.parse(loc, matcher.group(2), TEST_SYNTAX, (Matcher m, Boolean last) -> {
+                    if (m.group(1) != null) {
+                        snippet.catchPart = m.group(1);
+                        return;
+                    }
+                    if (m.group(2) != null) {
+                        substitutions.add(Map.entry(m.group(2), m.group(3)));
+                        return;
+                    }
+                    if (m.group(4) != null) {
+                        snippet.skip = m.group(4);
+                        return;
+                    }
+                    if (m.group(5) != null) {
+                        snippet.continued = true;
+                        return;
+                    }
+                    if (m.group(6) != null) {
+                        snippet.setup = m.group(6);
+                        return;
+                    }
+                    if (m.group(7) != null) {
+                        snippet.teardown = m.group(7);
+                        return;
+                    }
+                    if (m.group(8) != null) {
+                        snippet.warnings.add(m.group(8));
+                        return;
+                    }
+                    if (m.group(9) != null) {
+                        snippet.skipShardsFailures = true;
+                        return;
+                    }
+                    throw new InvalidUserDataException("Invalid test marker: " + line);
+                });
+            }
+            return true;
+        }
+        return false;
+    }
+
+    private boolean consoleHandled(String fileName, int lineNumber, String line, Snippet snippet) {
+        if (line.matches("\\/\\/\\s*CONSOLE\\s*")) {
+            if (snippet == null) {
+                throw new InvalidUserDataException(fileName + ":" + lineNumber + ": CONSOLE not paired with a snippet");
+            }
+            if (snippet.console != null) {
+                throw new InvalidUserDataException(fileName + ":" + lineNumber + ": Can't be both CONSOLE and NOTCONSOLE");
+            }
+            snippet.console = true;
+            return true;
+        } else if (line.matches("\\/\\/\\s*NOTCONSOLE\\s*")) {
+            if (snippet == null) {
+                throw new InvalidUserDataException(fileName + ":" + lineNumber + ": NOTCONSOLE not paired with a snippet");
+            }
+            if (snippet.console != null) {
+                throw new InvalidUserDataException(fileName + ":" + lineNumber + ": Can't be both CONSOLE and NOTCONSOLE");
+            }
+            snippet.console = false;
+            return true;
+        }
+        return false;
+    }
+
+    static Source matchSource(String line) {
+        Pattern pattern = Pattern.compile("\\[\"?source\"?(?:\\.[^,]+)?,\\s*\"?([-\\w]+)\"?(,((?!id=).)*(id=\"?([-\\w]+)\"?)?(.*))?].*");
+        Matcher matcher = pattern.matcher(line);
+        if (matcher.matches()) {
+            return new Source(true, matcher.group(1), matcher.group(5));
+        }
+        return new Source(false, null, null);
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java
new file mode 100644
index 0000000000000..87f0621d53fba
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import org.apache.commons.collections.map.HashedMap;
+import org.gradle.api.Action;
+import org.gradle.api.DefaultTask;
+import org.gradle.api.InvalidUserDataException;
+import org.gradle.api.file.ConfigurableFileTree;
+import org.gradle.api.tasks.Input;
+import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.TaskAction;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public abstract class DocSnippetTask extends DefaultTask {
+
+    /**
+     * Action to take on each snippet. Called with a single parameter, an
+     * instance of Snippet.
+     */
+    private Action<Snippet> perSnippet;
+
+    /**
+     * The docs to scan. Defaults to every file in the directory except the
+     * build.gradle file because that is appropriate for Elasticsearch's docs
+     * directory.
+     */
+    private ConfigurableFileTree docs;
+    private Map<String, String> defaultSubstitutions = new HashedMap();
+
+    @InputFiles
+    public ConfigurableFileTree getDocs() {
+        return docs;
+    }
+
+    public void setDocs(ConfigurableFileTree docs) {
+        this.docs = docs;
+    }
+
+    /**
+     * Substitutions done on every snippet's contents.
+     */
+    @Input
+    public Map<String, String> getDefaultSubstitutions() {
+        return defaultSubstitutions;
+    }
+
+    @TaskAction
+    void executeTask() {
+        for (File file : docs) {
+            List<Snippet> snippets = parseDocFile(docs.getDir(), file, new ArrayList<>());
+            if (perSnippet != null) {
+                snippets.forEach(perSnippet::execute);
+            }
+        }
+    }
+
+    List<Snippet> parseDocFile(File rootDir, File docFile, List<Map.Entry<String, String>> substitutions) {
+        SnippetParser parser = parserForFileType(docFile);
+        return parser.parseDoc(rootDir, docFile, substitutions);
+    }
+
+    private SnippetParser parserForFileType(File docFile) {
+        if (docFile.getName().endsWith(".asciidoc")) {
+            return new AsciidocSnippetParser(defaultSubstitutions);
+        }
+        throw new InvalidUserDataException("Unsupported file type: " + docFile.getName());
+    }
+
+    public void setDefaultSubstitutions(Map<String, String> defaultSubstitutions) {
+        this.defaultSubstitutions = defaultSubstitutions;
+    }
+
+    public void setPerSnippet(Action<Snippet> perSnippet) {
+        this.perSnippet = perSnippet;
+    }
+
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java
new file mode 100644
index 0000000000000..bbb5102dd6699
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import org.elasticsearch.gradle.OS;
+import org.elasticsearch.gradle.Version;
+import org.elasticsearch.gradle.VersionProperties;
+import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
+import org.elasticsearch.gradle.testclusters.TestClustersPlugin;
+import org.elasticsearch.gradle.testclusters.TestDistribution;
+import org.gradle.api.NamedDomainObjectContainer;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.file.Directory;
+import org.gradle.api.file.ProjectLayout;
+import org.gradle.api.internal.file.FileOperations;
+import org.gradle.api.plugins.JavaPluginExtension;
+import org.gradle.api.provider.Provider;
+import org.gradle.api.tasks.TaskProvider;
+
+import java.util.Map;
+
+import javax.inject.Inject;
+
+public class DocsTestPlugin implements Plugin<Project> {
+    private FileOperations fileOperations;
+    private ProjectLayout projectLayout;
+
+    @Inject
+    DocsTestPlugin(FileOperations fileOperations, ProjectLayout projectLayout) {
+        this.projectLayout = projectLayout;
+        this.fileOperations = fileOperations;
+    }
+
+    @Override
+    public void apply(Project project) {
+        project.getPluginManager().apply("elasticsearch.legacy-yaml-rest-test");
+
+        String distribution = System.getProperty("tests.distribution", "default");
+        // The distribution can be configured with -Dtests.distribution on the command line
+        NamedDomainObjectContainer<ElasticsearchCluster> testClusters = (NamedDomainObjectContainer<ElasticsearchCluster>) project
+            .getExtensions()
+            .getByName(TestClustersPlugin.EXTENSION_NAME);
+
+        testClusters.matching((c) -> c.getName().equals("yamlRestTest")).configureEach(c -> {
+            c.setTestDistribution(TestDistribution.valueOf(distribution.toUpperCase()));
+            c.setNameCustomization((name) -> name.replace("yamlRestTest", "node"));
+        });
+
+        project.getTasks().named("assemble").configure(task -> { task.setEnabled(false); });
+
+        Map<String, String> commonDefaultSubstitutions = Map.of(
+            /* These match up with the asciidoc syntax for substitutions but
+             * the values may differ. In particular {version} needs to resolve
+             * to the version being built for testing but needs to resolve to
+             * the last released version for docs. */
+            "\\{version\\}",
+            Version.fromString(VersionProperties.getElasticsearch()).toString(),
+            "\\{version_qualified\\}",
+            VersionProperties.getElasticsearch(),
+            "\\{lucene_version\\}",
+            VersionProperties.getLucene().replaceAll("-snapshot-\\w+$", ""),
+            "\\{build_flavor\\}",
+            distribution,
+            "\\{build_type\\}",
+            OS.conditionalString().onWindows(() -> "zip").onUnix(() -> "tar").supply()
+        );
+
+        project.getTasks().register("listSnippets", DocSnippetTask.class, task -> {
+            task.setGroup("Docs");
+            task.setDescription("List each snippet");
+            task.setDefaultSubstitutions(commonDefaultSubstitutions);
+            task.setPerSnippet(snippet -> System.out.println(snippet));
+        });
+
+        project.getTasks().register("listConsoleCandidates", DocSnippetTask.class, task -> {
+            task.setGroup("Docs");
+            task.setDescription("List snippets that probably should be marked // CONSOLE");
+            task.setDefaultSubstitutions(commonDefaultSubstitutions);
+            task.setPerSnippet(snippet -> {
+                if (snippet.isConsoleCandidate()) {
+                    System.out.println(snippet);
+                }
+            });
+        });
+
+        Provider<Directory> restRootDir = projectLayout.getBuildDirectory().dir("rest");
+        TaskProvider<RestTestsFromDocSnippetTask> buildRestTests = project.getTasks()
+            .register("buildRestTests", RestTestsFromDocSnippetTask.class, task -> {
+                task.setDefaultSubstitutions(commonDefaultSubstitutions);
+                task.getTestRoot().convention(restRootDir);
+                task.doFirst(task1 -> fileOperations.delete(restRootDir.get()));
+            });
+
+        // TODO: This effectively makes testRoot not customizable, which we don't do anyway atm
+        JavaPluginExtension byType = project.getExtensions().getByType(JavaPluginExtension.class);
+        byType.getSourceSets().getByName("yamlRestTest").getOutput().dir(Map.of("builtBy", buildRestTests), restRootDir);
+    }
+
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java
new file mode 100644
index 0000000000000..b17dd4c7e21d3
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import org.gradle.api.InvalidUserDataException;
+
+import java.util.function.BiConsumer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class ParsingUtils {
+
+    static void extraContent(String message, String content, int offset, String location, String pattern) {
+        StringBuilder cutOut = new StringBuilder();
+        cutOut.append(content.substring(offset - 6, offset));
+        cutOut.append('*');
+        cutOut.append(content.substring(offset, Math.min(offset + 5, content.length())));
+        String cutOutNoNl = cutOut.toString().replace("\n", "\\n");
+        throw new InvalidUserDataException(
+            location + ": Extra content " + message + " ('" + cutOutNoNl + "') matching [" + pattern + "]: " + content
+        );
+    }
+
+    /**
+     * Repeatedly match the pattern to the string, calling the handler with the
+     * matcher each time there is a match. If there are characters that don't
+     * match then blow up. The second argument passed to the handler is
+     * "is this the last match?".
+     */
+    static void parse(String location, String content, String pattern, BiConsumer<Matcher, Boolean> testHandler) {
+        if (content == null) {
+            return; // Silly null, only real stuff gets to match!
+        }
+        Matcher m = Pattern.compile(pattern).matcher(content);
+        int offset = 0;
+        while (m.find()) {
+            if (m.start() != offset) {
+                extraContent("between [" + offset + "] and [" + m.start() + "]", content, offset, location, pattern);
+            }
+            offset = m.end();
+            testHandler.accept(m, offset == content.length());
+        }
+        if (offset == 0) {
+            throw new InvalidUserDataException(location + ": Didn't match " + pattern + ": " + content);
+        }
+        if (offset != content.length()) {
+            extraContent("after [" + offset + "]", content, offset, location, pattern);
+        }
+    }
+
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java
new file mode 100644
index 0000000000000..c5b1d67627dd9
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java
@@ -0,0 +1,526 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import groovy.transform.PackageScope;
+
+import org.gradle.api.InvalidUserDataException;
+import org.gradle.api.file.DirectoryProperty;
+import org.gradle.api.internal.file.FileOperations;
+import org.gradle.api.model.ObjectFactory;
+import org.gradle.api.tasks.Input;
+import org.gradle.api.tasks.Internal;
+import org.gradle.api.tasks.OutputDirectory;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import javax.inject.Inject;
+
+public abstract class RestTestsFromDocSnippetTask extends DocSnippetTask {
+
+    private Map<String, String> setups = new HashMap<>();
+
+    private Map<String, String> teardowns = new HashMap<>();
+
+    /**
+     * Test setups defined in the build instead of the docs so they can be
+     * shared between many doc files.
+     */
+    @Input
+    public Map<String, String> getSetups() {
+        return setups;
+    }
+
+    public void setSetups(Map<String, String> setups) {
+        this.setups = setups;
+    }
+
+    /**
+     * Test teardowns defined in the build instead of the docs so they can be
+     * shared between many doc files.
+     */
+    @Input
+    public Map<String, String> getTeardowns() {
+        return teardowns;
+    }
+
+    public void setTeardowns(Map<String, String> teardowns) {
+        this.teardowns = teardowns;
+    }
+
+    /**
+     * A list of files that contain snippets that *probably* should be
+     * converted to `// CONSOLE` but have yet to be converted. If a file is in
+     * this list and doesn't contain unconverted snippets this task will fail.
+     * If there are unconverted snippets not in this list then this task will
+     * fail. All files are paths relative to the docs dir.
+     */
+    private List<String> expectedUnconvertedCandidates;
+
+    @Input
+    public List<String> getExpectedUnconvertedCandidates() {
+        return expectedUnconvertedCandidates;
+    }
+
+    public void setExpectedUnconvertedCandidates(List<String> expectedUnconvertedCandidates) {
+        this.expectedUnconvertedCandidates = expectedUnconvertedCandidates;
+    }
+
+    /**
+     * Root directory of the tests being generated. To make rest tests happy
+     * we generate them in a testRoot which is contained in this directory.
+     */
+    private DirectoryProperty testRoot;
+
+    private Set<String> names = new HashSet<>();
+
+    @Internal
+    public Set<String> getNames() {
+        return names;
+    }
+
+    public void setNames(Set<String> names) {
+        this.names = names;
+    }
+
+    @Inject
+    public abstract FileOperations getFileOperations();
+
+    /**
+     * Root directory containing all the files generated by this task. It is
+     * contained within testRoot.
+     */
+    @OutputDirectory
+    File getOutputRoot() {
+        return new File(testRoot.get().getAsFile(), "/rest-api-spec/test");
+    }
+
+    @OutputDirectory
+    DirectoryProperty getTestRoot() {
+        return testRoot;
+    }
+
+    @Inject
+    public RestTestsFromDocSnippetTask(ObjectFactory objectFactory) {
+        testRoot = objectFactory.directoryProperty();
+        TestBuilder builder = new TestBuilder();
+
+        setPerSnippet(snippet -> builder.handleSnippet(snippet));
+        doLast(task -> {
+            builder.finishLastTest();
+            builder.checkUnconverted();
+        });
+    }
+
+    /**
+     * Certain requests should not have the shard failure check because the
+     * format of the response is incompatible i.e. it is not a JSON object.
+     */
+    static boolean shouldAddShardFailureCheck(String path) {
+        return path.startsWith("_cat") == false && path.startsWith("_ml/datafeeds/") == false;
+    }
+
+    /**
+     * Converts Kibana's block quoted strings into standard JSON. These
+     * {@code """} delimited strings can be embedded in CONSOLE and can
+     * contain newlines and {@code "} without the normal JSON escaping.
+     * This has to add it.
+     */
+    @PackageScope
+    static String replaceBlockQuote(String body) {
+        int start = body.indexOf("\"\"\"");
+        if (start < 0) {
+            return body;
+        }
+        /*
+         * 1.3 is a fairly wild guess of the extra space needed to hold
+         * the escaped string.
+         */
+        StringBuilder result = new StringBuilder((int) (body.length() * 1.3));
+        int startOfNormal = 0;
+        while (start >= 0) {
+            int end = body.indexOf("\"\"\"", start + 3);
+            if (end < 0) {
+                throw new InvalidUserDataException("Invalid block quote starting at " + start + " in:\n" + body);
+            }
+            result.append(body.substring(startOfNormal, start));
+            result.append('"');
+            result.append(body.substring(start + 3, end).replace("\"", "\\\"").replace("\n", "\\n"));
+            result.append('"');
+            startOfNormal = end + 3;
+            start = body.indexOf("\"\"\"", startOfNormal);
+        }
+        result.append(body.substring(startOfNormal));
+        return result.toString();
+    }
+
+    private class TestBuilder {
+        /**
+         * These languages aren't supported by the syntax highlighter so we
+         * shouldn't use them.
+         */
+        private static final List<String> BAD_LANGUAGES = List.of("json", "javascript");
+
+        String method = "(?<method>GET|PUT|POST|HEAD|OPTIONS|DELETE)";
+        String pathAndQuery = "(?<pathAndQuery>[^\\n]+)";
+
+        String badBody = "GET|PUT|POST|HEAD|OPTIONS|DELETE|startyaml|#";
+        String body = "(?<body>(?:\\n(?!" + badBody + ")[^\\n]+)+)";
+
+        String rawRequest = "(?:" + method + "\\s+" + pathAndQuery + body + "?)";
+
+        String yamlRequest = "(?:startyaml(?s)(?<yaml>.+?)(?-s)endyaml)";
+        String nonComment = "(?:" + rawRequest + "|" + yamlRequest + ")";
+        String comment = "(?<comment>#.+)";
+
+        String SYNTAX = "(?:" + comment + "|" + nonComment + ")\\n+";
+
+        /**
+         * Files containing all snippets that *probably* should be converted
+         * to `// CONSOLE` but have yet to be converted. All files are paths
+         * relative to the docs dir.
+         */
+        private Set<String> unconvertedCandidates = new HashSet<>();
+
+        /**
+         * The last non-TESTRESPONSE snippet.
+         */
+        Snippet previousTest;
+
+        /**
+         * The file in which we saw the last snippet that made a test.
+         */
+        Path lastDocsPath;
+
+        /**
+         * The file we're building.
+         */
+        PrintWriter current;
+
+        Set<String> names = new HashSet<>();
+
+        /**
+         * Called each time a snippet is encountered. Tracks the snippets and
+         * calls buildTest to actually build the test.
+ */ + public void handleSnippet(Snippet snippet) { + if (snippet.isConsoleCandidate()) { + unconvertedCandidates.add(snippet.path.toString().replace('\\', '/')); + } + if (BAD_LANGUAGES.contains(snippet.language)) { + throw new InvalidUserDataException(snippet + ": Use `js` instead of `" + snippet.language + "`."); + } + if (snippet.testSetup) { + testSetup(snippet); + previousTest = snippet; + return; + } + if (snippet.testTearDown) { + testTearDown(snippet); + previousTest = snippet; + return; + } + if (snippet.testResponse || snippet.language.equals("console-result")) { + if (previousTest == null) { + throw new InvalidUserDataException(snippet + ": No paired previous test"); + } + if (previousTest.path.equals(snippet.path) == false) { + throw new InvalidUserDataException(snippet + ": Result can't be first in file"); + } + response(snippet); + return; + } + if (("js".equals(snippet.language)) && snippet.console != null && snippet.console) { + throw new InvalidUserDataException(snippet + ": Use `[source,console]` instead of `// CONSOLE`."); + } + if (snippet.test || snippet.language.equals("console")) { + test(snippet); + previousTest = snippet; + return; + } + // Must be an unmarked snippet.... + } + + private void test(Snippet test) { + setupCurrent(test); + + if (test.continued) { + /* Catch some difficult to debug errors with // TEST[continued] + * and throw a helpful error message. */ + if (previousTest == null || previousTest.path.equals(test.path) == false) { + throw new InvalidUserDataException("// TEST[continued] " + "cannot be on first snippet in a file: " + test); + } + if (previousTest != null && previousTest.testSetup) { + throw new InvalidUserDataException("// TEST[continued] " + "cannot immediately follow // TESTSETUP: " + test); + } + if (previousTest != null && previousTest.testTearDown) { + throw new InvalidUserDataException("// TEST[continued] " + "cannot immediately follow // TEARDOWN: " + test); + } + } else { + current.println("---"); + if (test.name != null && test.name.isBlank() == false) { + if (names.add(test.name) == false) { + throw new InvalidUserDataException("Duplicated snippet name '" + test.name + "': " + test); + } + current.println("\"" + test.name + "\":"); + } else { + current.println("\"line_" + test.start + "\":"); + } + /* The Elasticsearch test runner doesn't support quite a few + * constructs unless we output this skip. We don't know if + * we're going to use these constructs, but we might so we + * output the skip just in case. 
*/ + current.println(" - skip:"); + current.println(" features:"); + current.println(" - default_shards"); + current.println(" - stash_in_key"); + current.println(" - stash_in_path"); + current.println(" - stash_path_replace"); + current.println(" - warnings"); + } + if (test.skip != null) { + if (test.continued) { + throw new InvalidUserDataException("Continued snippets " + "can't be skipped"); + } + current.println(" - always_skip"); + current.println(" reason: " + test.skip); + } + if (test.setup != null) { + setup(test); + } + + body(test, false); + + if (test.teardown != null) { + teardown(test); + } + } + + private void response(Snippet response) { + if (null == response.skip) { + current.println(" - match:"); + current.println(" $body:"); + replaceBlockQuote(response.contents).lines().forEach(line -> current.println(" " + line)); + } + } + + private void teardown(final Snippet snippet) { + // insert a teardown defined outside of the docs + for (final String name : snippet.teardown.split(",")) { + final String teardown = teardowns.get(name); + if (teardown == null) { + throw new InvalidUserDataException("Couldn't find named teardown $name for " + snippet); + } + current.println("# Named teardown " + name); + current.println(teardown); + } + } + + private void testTearDown(Snippet snippet) { + if (previousTest != null && previousTest.testSetup == false && lastDocsPath == snippet.path) { + throw new InvalidUserDataException(snippet + " must follow test setup or be first"); + } + setupCurrent(snippet); + current.println("---"); + current.println("teardown:"); + body(snippet, true); + } + + void emitDo( + String method, + String pathAndQuery, + String body, + String catchPart, + List warnings, + boolean inSetup, + boolean skipShardFailures + ) { + String[] tokenized = pathAndQuery.split("\\?"); + String path = tokenized[0]; + String query = tokenized.length > 1 ? tokenized[1] : null; + if (path == null) { + path = ""; // Catch requests to the root... + } else { + path = path.replace("<", "%3C").replace(">", "%3E"); + } + current.println(" - do:"); + if (catchPart != null) { + current.println(" catch: " + catchPart); + } + if (false == warnings.isEmpty()) { + current.println(" warnings:"); + for (String warning : warnings) { + // Escape " because we're going to quote the warning + String escaped = warning.replaceAll("\"", "\\\\\""); + /* Quote the warning in case it starts with [ which makes + * it look too much like an array. */ + current.println(" - \"" + escaped + "\""); + } + } + current.println(" raw:"); + current.println(" method: " + method); + current.println(" path: \"" + path + "\""); + if (query != null) { + for (String param : query.split("&")) { + String[] tokenizedQuery = param.split("="); + String paramName = tokenizedQuery[0]; + String paramValue = tokenizedQuery.length > 1 ? tokenizedQuery[1] : null; + if (paramValue == null) { + paramValue = ""; + } + current.println(" " + paramName + ": \"" + paramValue + "\""); + } + } + if (body != null) { + // Throw out the leading newline we get from parsing the body + body = body.substring(1); + // Replace """ quoted strings with valid json ones + body = replaceBlockQuote(body); + current.println(" body: |"); + body.lines().forEach(line -> current.println(" " + line)); + } + /* Catch any shard failures. These only cause a non-200 response if + * no shard succeeds. But we need to fail the tests on all of these + * because they mean invalid syntax or broken queries or something + * else that we don't want to teach people to do. 
The REST test + * framework doesn't allow us to have assertions in the setup + * section so we have to skip it there. We also omit the assertion + * from APIs that don't return a JSON object + */ + if (false == inSetup && skipShardFailures == false && shouldAddShardFailureCheck(path)) { + current.println(" - is_false: _shards.failures"); + } + } + + private void body(Snippet snippet, boolean inSetup) { + ParsingUtils.parse(snippet.getLocation(), snippet.contents, SYNTAX, (matcher, last) -> { + if (matcher.group("comment") != null) { + // Comment + return; + } + String yamlRequest = matcher.group("yaml"); + if (yamlRequest != null) { + current.println(yamlRequest); + return; + } + String method = matcher.group("method"); + String pathAndQuery = matcher.group("pathAndQuery"); + String body = matcher.group("body"); + String catchPart = last ? snippet.catchPart : null; + if (pathAndQuery.startsWith("/")) { + // Leading '/'s break the generated paths + pathAndQuery = pathAndQuery.substring(1); + } + emitDo(method, pathAndQuery, body, catchPart, snippet.warnings, inSetup, snippet.skipShardsFailures); + }); + + } + + private PrintWriter setupCurrent(Snippet test) { + if (test.path.equals(lastDocsPath)) { + return current; + } + names.clear(); + finishLastTest(); + lastDocsPath = test.path; + + // Make the destination file: + // Shift the path into the destination directory tree + Path dest = getOutputRoot().toPath().resolve(test.path); + // Replace the extension + String fileName = dest.getName(dest.getNameCount() - 1).toString(); + dest = dest.getParent().resolve(fileName.replace(".asciidoc", ".yml")); + + // Now setup the writer + try { + Files.createDirectories(dest.getParent()); + current = new PrintWriter(dest.toFile(), "UTF-8"); + return current; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private void testSetup(Snippet snippet) { + if (lastDocsPath == snippet.path) { + throw new InvalidUserDataException( + snippet + ": wasn't first. TESTSETUP can only be used in the first snippet of a document." 
+ ); + } + setupCurrent(snippet); + current.println("---"); + current.println("setup:"); + if (snippet.setup != null) { + setup(snippet); + } + body(snippet, true); + } + + private void setup(final Snippet snippet) { + // insert a setup defined outside of the docs + for (final String name : snippet.setup.split(",")) { + final String setup = setups.get(name); + if (setup == null) { + throw new InvalidUserDataException("Couldn't find named setup " + name + " for " + snippet); + } + current.println("# Named setup " + name); + current.println(setup); + } + } + + public void checkUnconverted() { + List listedButNotFound = new ArrayList<>(); + for (String listed : expectedUnconvertedCandidates) { + if (false == unconvertedCandidates.remove(listed)) { + listedButNotFound.add(listed); + } + } + String message = ""; + if (false == listedButNotFound.isEmpty()) { + Collections.sort(listedButNotFound); + listedButNotFound = listedButNotFound.stream().map(notfound -> " " + notfound).collect(Collectors.toList()); + message += "Expected unconverted snippets but none found in:\n"; + message += listedButNotFound.stream().collect(Collectors.joining("\n")); + } + if (false == unconvertedCandidates.isEmpty()) { + List foundButNotListed = new ArrayList<>(unconvertedCandidates); + Collections.sort(foundButNotListed); + foundButNotListed = foundButNotListed.stream().map(f -> " " + f).collect(Collectors.toList()); + if (false == "".equals(message)) { + message += "\n"; + } + message += "Unexpected unconverted snippets:\n"; + message += foundButNotListed.stream().collect(Collectors.joining("\n")); + } + if (false == "".equals(message)) { + throw new InvalidUserDataException(message); + } + } + + public void finishLastTest() { + if (current != null) { + current.close(); + current = null; + } + } + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java new file mode 100644 index 0000000000000..b8aa864734f44 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java @@ -0,0 +1,188 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.JsonParser; + +import org.gradle.api.InvalidUserDataException; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +public class Snippet { + static final int NOT_FINISHED = -1; + + /** + * Path to the file containing this snippet. Relative to docs.dir of the + * SnippetsTask that created it. 
+ */ + Path path; + int start; + int end = NOT_FINISHED; + public String contents; + + Boolean console = null; + boolean test = false; + boolean testResponse = false; + boolean testSetup = false; + boolean testTearDown = false; + String skip = null; + boolean continued = false; + String language = null; + String catchPart = null; + String setup = null; + String teardown = null; + boolean curl; + List warnings = new ArrayList(); + boolean skipShardsFailures = false; + String name; + + public Snippet(Path path, int start, String name) { + this.path = path; + this.start = start; + this.name = name; + } + + public void validate() { + if (language == null) { + throw new InvalidUserDataException( + name + + ": " + + "Snippet missing a language. This is required by " + + "Elasticsearch's doc testing infrastructure so we " + + "be sure we don't accidentally forget to test a " + + "snippet." + ); + } + assertValidCurlInput(); + assertValidJsonInput(); + } + + String getLocation() { + return path + "[" + start + ":" + end + "]"; + } + + private void assertValidCurlInput() { + // Try to detect snippets that contain `curl` + if ("sh".equals(language) || "shell".equals(language)) { + curl = contents.contains("curl"); + if (console == Boolean.FALSE && curl == false) { + throw new InvalidUserDataException(name + ": " + "No need for NOTCONSOLE if snippet doesn't " + "contain `curl`."); + } + } + } + + private void assertValidJsonInput() { + if (testResponse && ("js" == language || "console-result" == language) && null == skip) { + String quoted = contents + // quote values starting with $ + .replaceAll("([:,])\\s*(\\$[^ ,\\n}]+)", "$1 \"$2\"") + // quote fields starting with $ + .replaceAll("(\\$[^ ,\\n}]+)\\s*:", "\"$1\":"); + + JsonFactory jf = new JsonFactory(); + jf.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER, true); + JsonParser jsonParser; + + try { + jsonParser = jf.createParser(quoted); + while (jsonParser.isClosed() == false) { + jsonParser.nextToken(); + } + } catch (JsonParseException e) { + throw new InvalidUserDataException( + "Invalid json in " + + name + + ". The error is:\n" + + e.getMessage() + + ".\n" + + "After substitutions and munging, the json looks like:\n" + + quoted, + e + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public String toString() { + String result = path + "[" + start + ":" + end + "]"; + if (language != null) { + result += "(" + language + ")"; + } + if (console != null) { + result += console ? "// CONSOLE" : "// NOTCONSOLE"; + } + if (test) { + result += "// TEST"; + if (catchPart != null) { + result += "[catch: " + catchPart + "]"; + } + if (skip != null) { + result += "[skip=" + skip + "]"; + } + if (continued) { + result += "[continued]"; + } + if (setup != null) { + result += "[setup:" + setup + "]"; + } + if (teardown != null) { + result += "[teardown:" + teardown + "]"; + } + for (String warning : warnings) { + result += "[warning:" + warning + "]"; + } + if (skipShardsFailures) { + result += "[skip_shard_failures]"; + } + } + if (testResponse) { + result += "// TESTRESPONSE"; + if (skip != null) { + result += "[skip=" + skip + "]"; + } + } + if (testSetup) { + result += "// TESTSETUP"; + } + if (curl) { + result += "(curl)"; + } + return result; + } + + /** + * Is this snippet a candidate for conversion to `// CONSOLE`? + */ + boolean isConsoleCandidate() { + /* Snippets that are responses or already marked as `// CONSOLE` or + * `// NOTCONSOLE` are not candidates. 
*/ + if (console != null || testResponse) { + return false; + } + /* js snippets almost always should be marked with `// CONSOLE`. js + * snippets that shouldn't be marked `// CONSOLE`, like examples for + * js client, should always be marked with `// NOTCONSOLE`. + * + * `sh` snippets that contain `curl` almost always should be marked + * with `// CONSOLE`. In the exceptionally rare cases where they are + * not communicating with Elasticsearch, like the examples in the ec2 + * and gce discovery plugins, the snippets should be marked + * `// NOTCONSOLE`. */ + return language.equals("js") || curl; + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java new file mode 100644 index 0000000000000..064c1c460febf --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import java.io.File; +import java.util.List; +import java.util.Map; + +public interface SnippetParser { + List parseDoc(File rootDir, File docFile, List> substitutions); +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java new file mode 100644 index 0000000000000..b7f2f01aa7987 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +public final class Source { + boolean matches; + String language; + String name; + + public Source(boolean matches, String language, String name) { + this.matches = matches; + this.language = language; + this.name = name; + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy new file mode 100644 index 0000000000000..b7ac363ef7ad3 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc; + +import spock.lang.Specification +import spock.lang.Unroll + +import org.gradle.api.InvalidUserDataException + +import static org.elasticsearch.gradle.internal.doc.AsciidocSnippetParser.finalizeSnippet; +import static org.elasticsearch.gradle.internal.doc.AsciidocSnippetParser.matchSource; + +class AsciidocParserSpec extends Specification { + + def testMatchSource() { + expect: + with(matchSource("[source,console]")) { + matches == true + language == "console" + name == null + } + + with(matchSource("[source,console,id=snippet-name-1]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source, console, id=snippet-name-1]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source, console, id=snippet-name-1]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source,console,attr=5,id=snippet-name-1,attr2=6]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source,console, attr=5, id=snippet-name-1, attr2=6]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[\"source\",\"console\",id=\"snippet-name-1\"]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source,console,id=\"snippet-name-1\"]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + with(matchSource("[source.merge.styled,esql]")) { + matches == true + language == "esql" + } + + with(matchSource("[source.merge.styled,foo-bar]")) { + matches == true + language == "foo-bar" + } + } + + @Unroll + def "checks for valid json for #languageParam"() { + given: + def snippet = snippet() { + language = languageParam + testResponse = true + } + def json = """{ + "name": "John Doe", + "age": 30, + "isMarried": true, + "address": { + "street": "123 Main Street", + "city": "Springfield", + "state": "IL", + "zip": "62701" + }, + "hobbies": ["Reading", "Cooking", "Traveling"] +}""" + when: + def result = finalizeSnippet(snippet, json, [:], [:].entrySet()) + then: + result != null + + when: + finalizeSnippet(snippet, "some no valid json", [:], [:].entrySet()) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("Invalid json in") + + when: + snippet.skip = "true" + result = finalizeSnippet(snippet, "some no valid json", [:], [:].entrySet()) + then: + result != null + + where: + languageParam << ["js", "console-result"] + } + + def "test finalized snippet handles substitutions"() { + given: + def snippet = snippet() { + language = "console" + } + when: + finalizeSnippet(snippet, "snippet-content substDefault subst", [substDefault: "\$body"], [subst: 'substValue'].entrySet()) + then: + snippet.contents == "snippet-content \$body substValue" + } + + def snippetMustHaveLanguage() { + given: + def snippet = snippet() + when: + finalizeSnippet(snippet, "snippet-content", [:], []) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("Snippet missing a language.") + } + + def testEmit() { + given: + def snippet = snippet() { + language = "console" + } + when: + finalizeSnippet(snippet, "snippet-content", [:], []) + then: + snippet.contents == "snippet-content" + } + + def testSnippetsWithCurl() { + given: + def snippet = snippet() { + language = "sh" + name = "snippet-name-1" + } + when: + finalizeSnippet(snippet, "curl substDefault subst", [:], 
[:].entrySet()) + then: + snippet.curl == true + } + + def "test snippets with no curl no console"() { + given: + def snippet = snippet() { + console = false + language = "shell" + } + when: + finalizeSnippet(snippet, "hello substDefault subst", [:], [:].entrySet()) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("No need for NOTCONSOLE if snippet doesn't contain `curl`") + } + + Snippet snippet(Closure configClosure = {}) { + def snippet = new Snippet(new File("SomePath").toPath(), 0, "snippet-name-1") + configClosure.delegate = snippet + configClosure() + return snippet + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy new file mode 100644 index 0000000000000..85ce3c1804474 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy @@ -0,0 +1,575 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc + +import spock.lang.Specification +import spock.lang.TempDir + +import org.gradle.api.InvalidUserDataException +import org.gradle.testfixtures.ProjectBuilder + +import static org.elasticsearch.gradle.internal.test.TestUtils.normalizeString + +class DocSnippetTaskSpec extends Specification { + + @TempDir + File tempDir + + def "handling test parsing multiple snippets per file"() { + given: + def project = ProjectBuilder.builder().build() + def task = project.tasks.register("docSnippetTask", DocSnippetTask).get() + when: + def substitutions = [] + def snippets = task.parseDocFile( + tempDir, docFile( + """ +[[mapper-annotated-text]] +=== Mapper annotated text plugin + +experimental[] + +The mapper-annotated-text plugin provides the ability to index text that is a +combination of free-text and special markup that is typically used to identify +items of interest such as people or organisations (see NER or Named Entity Recognition +tools). + + +The elasticsearch markup allows one or more additional tokens to be injected, unchanged, into the token +stream at the same position as the underlying text it annotates. + +:plugin_name: mapper-annotated-text +include::install_remove.asciidoc[] + +[[mapper-annotated-text-usage]] +==== Using the `annotated-text` field + +The `annotated-text` tokenizes text content as per the more common {ref}/text.html[`text`] field (see +"limitations" below) but also injects any marked-up annotation tokens directly into +the search index: + +[source,console] +-------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "my_field": { + "type": "annotated_text" + } + } + } +} +-------------------------- + +Such a mapping would allow marked-up text eg wikipedia articles to be indexed as both text +and structured tokens. The annotations use a markdown-like syntax using URL encoding of +one or more values separated by the `&` symbol. 
+ + +We can use the "_analyze" api to test how an example annotation would be stored as tokens +in the search index: + + +[source,js] +-------------------------- +GET my-index-000001/_analyze +{ + "field": "my_field", + "text":"Investors in [Apple](Apple+Inc.) rejoiced." +} +-------------------------- +// NOTCONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "investors", + "start_offset": 0, + "end_offset": 9, + "type": "", + "position": 0 + }, + { + "token": "in", + "start_offset": 10, + "end_offset": 12, + "type": "", + "position": 1 + }, + { + "token": "Apple Inc.", <1> + "start_offset": 13, + "end_offset": 18, + "type": "annotation", + "position": 2 + }, + { + "token": "apple", + "start_offset": 13, + "end_offset": 18, + "type": "", + "position": 2 + }, + { + "token": "rejoiced", + "start_offset": 19, + "end_offset": 27, + "type": "", + "position": 3 + } + ] +} +-------------------------------------------------- +// NOTCONSOLE + +<1> Note the whole annotation token `Apple Inc.` is placed, unchanged as a single token in +the token stream and at the same position (position 2) as the text token (`apple`) it annotates. + + +We can now perform searches for annotations using regular `term` queries that don't tokenize +the provided search values. Annotations are a more precise way of matching as can be seen +in this example where a search for `Beck` will not match `Jeff Beck` : + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_field": "[Beck](Beck) announced a new tour"<1> +} + +PUT my-index-000001/_doc/2 +{ + "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<2> +} + +# Example search +GET my-index-000001/_search +{ + "query": { + "term": { + "my_field": "Beck" <3> + } + } +} +-------------------------- + +<1> As well as tokenising the plain text into single words e.g. `beck`, here we +inject the single token value `Beck` at the same position as `beck` in the token stream. +<2> Note annotations can inject multiple tokens at the same position - here we inject both +the very specific value `Jeff Beck` and the broader term `Guitarist`. This enables +broader positional queries e.g. finding mentions of a `Guitarist` near to `strat`. +<3> A benefit of searching with these carefully defined annotation tokens is that a query for +`Beck` will not match document 2 that contains the tokens `jeff`, `beck` and `Jeff Beck` + +WARNING: Any use of `=` signs in annotation values eg `[Prince](person=Prince)` will +cause the document to be rejected with a parse failure. In future we hope to have a use for +the equals signs so wil actively reject documents that contain this today. + + +[[mapper-annotated-text-tips]] +==== Data modelling tips +===== Use structured and unstructured fields + +Annotations are normally a way of weaving structured information into unstructured text for +higher-precision search. + +`Entity resolution` is a form of document enrichment undertaken by specialist software or people +where references to entities in a document are disambiguated by attaching a canonical ID. +The ID is used to resolve any number of aliases or distinguish between people with the +same name. The hyperlinks connecting Wikipedia's articles are a good example of resolved +entity IDs woven into text. 
+ +These IDs can be embedded as annotations in an annotated_text field but it often makes +sense to include them in dedicated structured fields to support discovery via aggregations: + +[source,console] +-------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "my_unstructured_text_field": { + "type": "annotated_text" + }, + "my_structured_people_field": { + "type": "text", + "fields": { + "keyword" : { + "type": "keyword" + } + } + } + } + } +} +-------------------------- + +Applications would then typically provide content and discover it as follows: + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_unstructured_text_field": "[Shay](%40kimchy) created elasticsearch", + "my_twitter_handles": ["@kimchy"] <1> +} + +GET my-index-000001/_search +{ + "query": { + "query_string": { + "query": "elasticsearch OR logstash OR kibana",<2> + "default_field": "my_unstructured_text_field" + } + }, + "aggregations": { + \t"top_people" :{ + \t "significant_terms" : { <3> +\t "field" : "my_twitter_handles.keyword" + \t } + \t} + } +} +-------------------------- + +<1> Note the `my_twitter_handles` contains a list of the annotation values +also used in the unstructured text. (Note the annotated_text syntax requires escaping). +By repeating the annotation values in a structured field this application has ensured that +the tokens discovered in the structured field can be used for search and highlighting +in the unstructured field. +<2> In this example we search for documents that talk about components of the elastic stack +<3> We use the `my_twitter_handles` field here to discover people who are significantly +associated with the elastic stack. + +===== Avoiding over-matching annotations +By design, the regular text tokens and the annotation tokens co-exist in the same indexed +field but in rare cases this can lead to some over-matching. + +The value of an annotation often denotes a _named entity_ (a person, place or company). +The tokens for these named entities are inserted untokenized, and differ from typical text +tokens because they are normally: + +* Mixed case e.g. `Madonna` +* Multiple words e.g. `Jeff Beck` +* Can have punctuation or numbers e.g. `Apple Inc.` or `@kimchy` + +This means, for the most part, a search for a named entity in the annotated text field will +not have any false positives e.g. when selecting `Apple Inc.` from an aggregation result +you can drill down to highlight uses in the text without "over matching" on any text tokens +like the word `apple` in this context: + + the apple was very juicy + +However, a problem arises if your named entity happens to be a single term and lower-case e.g. the +company `elastic`. In this case, a search on the annotated text field for the token `elastic` +may match a text document such as this: + + they fired an elastic band + +To avoid such false matches users should consider prefixing annotation values to ensure +they don't name clash with text tokens e.g. 
+ + [elastic](Company_elastic) released version 7.0 of the elastic stack today + + + + +[[mapper-annotated-text-highlighter]] +==== Using the `annotated` highlighter + +The `annotated-text` plugin includes a custom highlighter designed to mark up search hits +in a way which is respectful of the original markup: + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_field": "The cat sat on the [mat](sku3578)" +} + +GET my-index-000001/_search +{ + "query": { + "query_string": { + "query": "cats" + } + }, + "highlight": { + "fields": { + "my_field": { + "type": "annotated", <1> + "require_field_match": false + } + } + } +} +-------------------------- + +<1> The `annotated` highlighter type is designed for use with annotated_text fields + +The annotated highlighter is based on the `unified` highlighter and supports the same +settings but does not use the `pre_tags` or `post_tags` parameters. Rather than using +html-like markup such as `cat` the annotated highlighter uses the same +markdown-like syntax used for annotations and injects a key=value annotation where `_hit_term` +is the key and the matched search term is the value e.g. + + The [cat](_hit_term=cat) sat on the [mat](sku3578) + +The annotated highlighter tries to be respectful of any existing markup in the original +text: + +* If the search term matches exactly the location of an existing annotation then the +`_hit_term` key is merged into the url-like syntax used in the `(...)` part of the +existing annotation. +* However, if the search term overlaps the span of an existing annotation it would break +the markup formatting so the original annotation is removed in favour of a new annotation +with just the search hit information in the results. 
+* Any non-overlapping annotations in the original text are preserved in highlighter +selections + + +[[mapper-annotated-text-limitations]] +==== Limitations + +The annotated_text field type supports the same mapping settings as the `text` field type +but with the following exceptions: + +* No support for `fielddata` or `fielddata_frequency_filter` +* No support for `index_prefixes` or `index_phrases` indexing + +""" + ), substitutions + ) + then: + snippets*.test == [false, false, false, false, false, false, false] + snippets*.catchPart == [null, null, null, null, null, null, null] + } + + def "handling test parsing"() { + when: + def substitutions = [] + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TEST[s/_explain\\/1/_explain\\/1?error_trace=false/ catch:/painless_explain_error/] +""" + ), substitutions + ) + then: + snippets*.test == [true] + snippets*.catchPart == ["/painless_explain_error/"] + substitutions.size() == 1 + substitutions[0].key == "_explain\\/1" + substitutions[0].value == "_explain\\/1?error_trace=false" + + when: + substitutions = [] + snippets = task().parseDocFile( + tempDir, docFile( + """ + +[source,console] +---- +PUT _snapshot/my_hdfs_repository +{ + "type": "hdfs", + "settings": { + "uri": "hdfs://namenode:8020/", + "path": "elasticsearch/repositories/my_hdfs_repository", + "conf.dfs.client.read.shortcircuit": "true" + } +} +---- +// TEST[skip:we don't have hdfs set up while testing this] +""" + ), substitutions + ) + then: + snippets*.test == [true] + snippets*.skip == ["we don't have hdfs set up while testing this"] + } + + def "handling testresponse parsing"() { + when: + def substitutions = [] + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason/] +""" + ), substitutions + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + substitutions.size() == 1 + substitutions[0].key == "\\.\\.\\." 
+ substitutions[0].value == + "\"script_stack\": \$body.error.caused_by.script_stack, \"script\": \$body.error.caused_by.script, \"lang\": \$body.error.caused_by.lang, \"position\": \$body.error.caused_by.position, \"caused_by\": \$body.error.caused_by.caused_by, \"reason\": \$body.error.caused_by.reason" + + when: + snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TESTRESPONSE[skip:no setup made for this example yet] +""" + ), [] + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + snippets*.skip == ["no setup made for this example yet"] + + when: + substitutions = [] + snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,txt] +--------------------------------------------------------------------------- +my-index-000001 0 p RELOCATING 3014 31.1mb 192.168.56.10 H5dfFeA -> -> 192.168.56.30 bGG90GE +--------------------------------------------------------------------------- +// TESTRESPONSE[non_json] +""" + ), substitutions + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + substitutions.size() == 4 + } + + + def "handling console parsing"() { + when: + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- + +// $firstToken +---- +""" + ), [] + ) + then: + snippets*.console == [firstToken.equals("CONSOLE")] + + + when: + task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +// $firstToken +// $secondToken +---- +""" + ), [] + ) + then: + def e = thrown(InvalidUserDataException) + e.message == "mapping-charfilter.asciidoc:4: Can't be both CONSOLE and NOTCONSOLE" + + when: + task().parseDocFile( + tempDir, docFile( + """ +// $firstToken +// $secondToken +""" + ), [] + ) + then: + e = thrown(InvalidUserDataException) + e.message == "mapping-charfilter.asciidoc:1: $firstToken not paired with a snippet" + + where: + firstToken << ["CONSOLE", "NOTCONSOLE"] + secondToken << ["NOTCONSOLE", "CONSOLE"] + } + + def "test parsing snippet from doc"() { + def doc = docFile( + """ +[source,console] +---- +GET /_analyze +{ + "tokenizer": "keyword", + "char_filter": [ + { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2" + ] + } + ], + "text": "My license plate is ٢٥٠١٥" +} +---- +""" + ) + def snippets = task().parseDocFile(tempDir, doc, []) + expect: + snippets[0].start == 3 + snippets[0].language == "console" + normalizeString(snippets[0].contents, tempDir) == """GET /_analyze +{ + "tokenizer": "keyword", + "char_filter": [ + { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2" + ] + } + ], + "text": "My license plate is ٢٥٠١٥" +}""" + } + + File docFile(String docContent) { + def file = tempDir.toPath().resolve("mapping-charfilter.asciidoc").toFile() + file.text = docContent + return file + } + + + private DocSnippetTask task() { + ProjectBuilder.builder().build().tasks.register("docSnippetTask", DocSnippetTask).get() + } + +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy new file mode 100644 index 0000000000000..6e86cba235886 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy @@ -0,0 +1,833 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc + +import spock.lang.Specification +import spock.lang.TempDir + +import org.gradle.api.InvalidUserDataException +import org.gradle.testfixtures.ProjectBuilder + +import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.replaceBlockQuote +import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.shouldAddShardFailureCheck +import static org.elasticsearch.gradle.internal.test.TestUtils.normalizeString + +class RestTestsFromDocSnippetTaskSpec extends Specification { + + @TempDir + File tempDir; + + def "test simple block quote"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"") == "\"foo\": \"bort baz\"" + } + + def "test multiple block quotes"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"") == "\"foo\": \"bort baz\", \"bar\": \"other\"" + } + + def "test escaping in block quote"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"") == "\"foo\": \"bort\\\" baz\"" + replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"") == "\"foo\": \"bort\\n baz\"" + } + + def "test invalid block quotes"() { + given: + String input = "\"foo\": \"\"\"bar\""; + when: + RestTestsFromDocSnippetTask.replaceBlockQuote(input); + then: + def e = thrown(InvalidUserDataException) + e.message == "Invalid block quote starting at 7 in:\n" + input + } + + def "test is doc write request"() { + expect: + shouldAddShardFailureCheck("doc-index/_search") == true + shouldAddShardFailureCheck("_cat") == false + shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview") == false + } + + def "can create rest tests from docs"() { + def build = ProjectBuilder.builder().build() + given: + def task = build.tasks.create("restTestFromSnippet", RestTestsFromDocSnippetTask) + task.expectedUnconvertedCandidates = ["ml-update-snapshot.asciidoc", "reference/security/authorization/run-as-privilege.asciidoc"] + docs() + task.docs = build.fileTree(new File(tempDir, "docs")) + task.testRoot.convention(build.getLayout().buildDirectory.dir("rest-tests")); + + when: + task.getActions().forEach { it.execute(task) } + def restSpec = new File(task.getTestRoot().get().getAsFile(), "rest-api-spec/test/painless-debugging.yml") + + then: + restSpec.exists() + normalizeString(restSpec.text, tempDir) == """--- +"line_22": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: PUT + path: "hockey/_doc/1" + refresh: "" + body: | + {"first":"johnny","last":"gaudreau","goals":[9,27,1],"assists":[17,46,0],"gp":[26,82,1]} + - is_false: _shards.failures + - do: + catch: /painless_explain_error/ + raw: + method: POST + path: "hockey/_explain/1" + error_trace: "false" + body: | + { + "query": { + "script": { + "script": "Debug.explain(doc.goals)" + } + } + } + - is_false: _shards.failures + - match: + \$body: + { + "error": { + "type": "script_exception", + "to_string": "[1, 9, 27]", + "painless_class": "org.elasticsearch.index.fielddata.ScriptDocValues.Longs", + "java_class": "org.elasticsearch.index.fielddata.ScriptDocValues\$Longs", + "script_stack": \$body.error.script_stack, "script": \$body.error.script, "lang": 
\$body.error.lang, "position": \$body.error.position, "caused_by": \$body.error.caused_by, "root_cause": \$body.error.root_cause, "reason": \$body.error.reason + }, + "status": 400 + } + - do: + catch: /painless_explain_error/ + raw: + method: POST + path: "hockey/_update/1" + error_trace: "false" + body: | + { + "script": "Debug.explain(ctx._source)" + } + - is_false: _shards.failures + - match: + \$body: + { + "error" : { + "root_cause": \$body.error.root_cause, + "type": "illegal_argument_exception", + "reason": "failed to execute script", + "caused_by": { + "type": "script_exception", + "to_string": \$body.error.caused_by.to_string, + "painless_class": "java.util.LinkedHashMap", + "java_class": "java.util.LinkedHashMap", + "script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason + } + }, + "status": 400 + }""" + def restSpec2 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/ml-update-snapshot.yml") + restSpec2.exists() + normalizeString(restSpec2.text, tempDir) == """--- +"line_50": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - always_skip + reason: todo + - do: + raw: + method: POST + path: "_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update" + body: | + { + "description": "Snapshot 1", + "retain": true + } + - is_false: _shards.failures""" + def restSpec3 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/sql/getting-started.yml") + restSpec3.exists() + normalizeString(restSpec3.text, tempDir) == """--- +"line_10": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: PUT + path: "library/_bulk" + refresh: "" + body: | + {"index":{"_id": "Leviathan Wakes"}} + {"name": "Leviathan Wakes", "author": "James S.A. 
Corey", "release_date": "2011-06-02", "page_count": 561} + {"index":{"_id": "Hyperion"}} + {"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} + {"index":{"_id": "Dune"}} + {"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} + - is_false: _shards.failures + - do: + raw: + method: POST + path: "_sql" + format: "txt" + body: | + { + "query": "SELECT * FROM library WHERE release_date < '2000-01-01'" + } + - is_false: _shards.failures + - match: + \$body: + / /s+author /s+/| /s+name /s+/| /s+page_count /s+/| /s+release_date/s* + ---------------/+---------------/+---------------/+------------------------/s* + Dan /s+Simmons /s+/|Hyperion /s+/|482 /s+/|1989-05-26T00:00:00.000Z/s* + Frank /s+Herbert /s+/|Dune /s+/|604 /s+/|1965-06-01T00:00:00.000Z/s*/""" + def restSpec4 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/security/authorization/run-as-privilege.yml") + restSpec4.exists() + normalizeString(restSpec4.text, tempDir) == """--- +"line_51": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_director" + refresh: "true" + body: | + { + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "run_as": [ "jacknich", "rdeniro" ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_114": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_admin_role" + refresh: "true" + body: | + { + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "admin", "read" ], + "resources": [ "*" ] + } + ], + "run_as": [ "analyst_user" ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_143": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_analyst_role" + refresh: "true" + body: | + { + "cluster": [ "monitor"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": ["manage"] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "read" ], + "resources": [ "*" ] + } + ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_170": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/user/admin_user" + refresh: "true" + body: | + { + "password": "l0ng-r4nd0m-p@ssw0rd", + "roles": [ "my_admin_role" ], + "full_name": "Eirian Zola", + "metadata": { "intelligence" : 7} + } + - is_false: _shards.failures +--- +"line_184": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/user/analyst_user" + refresh: "true" + body: | + { + "password": "l0nger-r4nd0mer-p@ssw0rd", + "roles": [ "my_analyst_role" ], + "full_name": "Monday Jaffe", + "metadata": { "innovation" : 8} + } + - is_false: _shards.failures""" +} + + File docFile(String fileName, String docContent) { + def file = tempDir.toPath().resolve(fileName).toFile() + 
file.parentFile.mkdirs() + file.text = docContent + return file + } + + + void docs() { + docFile( + "docs/reference/sql/getting-started.asciidoc", """ +[role="xpack"] +[[sql-getting-started]] +== Getting Started with SQL + +To start using {es-sql}, create +an index with some data to experiment with: + +[source,console] +-------------------------------------------------- +PUT /library/_bulk?refresh +{"index":{"_id": "Leviathan Wakes"}} +{"name": "Leviathan Wakes", "author": "James S.A. Corey", "release_date": "2011-06-02", "page_count": 561} +{"index":{"_id": "Hyperion"}} +{"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} +{"index":{"_id": "Dune"}} +{"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} +-------------------------------------------------- + +And now you can execute SQL using the <>: + +[source,console] +-------------------------------------------------- +POST /_sql?format=txt +{ + "query": "SELECT * FROM library WHERE release_date < '2000-01-01'" +} +-------------------------------------------------- +// TEST[continued] + +Which should return something along the lines of: + +[source,text] +-------------------------------------------------- + author | name | page_count | release_date +---------------+---------------+---------------+------------------------ +Dan Simmons |Hyperion |482 |1989-05-26T00:00:00.000Z +Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z +-------------------------------------------------- +// TESTRESPONSE[s/\\|/\\\\|/ s/\\+/\\\\+/] +// TESTRESPONSE[non_json] + +You can also use the <>. There is a script to start it +shipped in x-pack's bin directory: + +[source,bash] +-------------------------------------------------- +\$ ./bin/elasticsearch-sql-cli +-------------------------------------------------- + +From there you can run the same query: + +[source,sqlcli] +-------------------------------------------------- +sql> SELECT * FROM library WHERE release_date < '2000-01-01'; + author | name | page_count | release_date +---------------+---------------+---------------+------------------------ +Dan Simmons |Hyperion |482 |1989-05-26T00:00:00.000Z +Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z +-------------------------------------------------- +""" + ) + docFile( + "docs/ml-update-snapshot.asciidoc", + """ +[role="xpack"] +[[ml-update-snapshot]] += Update model snapshots API +++++ +Update model snapshots +++++ + +Updates certain properties of a snapshot. + +[[ml-update-snapshot-request]] +== {api-request-title} + +`POST _ml/anomaly_detectors//model_snapshots//_update` + +[[ml-update-snapshot-prereqs]] +== {api-prereq-title} + +Requires the `manage_ml` cluster privilege. This privilege is included in the +`machine_learning_admin` built-in role. + +[[ml-update-snapshot-path-parms]] +== {api-path-parms-title} + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] + +[[ml-update-snapshot-request-body]] +== {api-request-body-title} + +The following properties can be updated after the model snapshot is created: + +`description`:: +(Optional, string) A description of the model snapshot. 
+ +`retain`:: +(Optional, Boolean) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] + + +[[ml-update-snapshot-example]] +== {api-examples-title} + +[source,console] +-------------------------------------------------- +POST +_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update +{ + "description": "Snapshot 1", + "retain": true +} +-------------------------------------------------- +// TEST[skip:todo] + +When the snapshot is updated, you receive the following results: +[source,js] +---- +{ + "acknowledged": true, + "model": { + "job_id": "it_ops_new_logs", + "timestamp": 1491852978000, + "description": "Snapshot 1", +... + "retain": true + } +} +---- + +""" + ) + + docFile( + "docs/painless-debugging.asciidoc", + """ + +[[painless-debugging]] +=== Painless Debugging + +==== Debug.Explain + +Painless doesn't have a +{wikipedia}/Read%E2%80%93eval%E2%80%93print_loop[REPL] +and while it'd be nice for it to have one day, it wouldn't tell you the +whole story around debugging painless scripts embedded in Elasticsearch because +the data that the scripts have access to or "context" is so important. For now +the best way to debug embedded scripts is by throwing exceptions at choice +places. While you can throw your own exceptions +(`throw new Exception('whatever')`), Painless's sandbox prevents you from +accessing useful information like the type of an object. So Painless has a +utility method, `Debug.explain` which throws the exception for you. For +example, you can use {ref}/search-explain.html[`_explain`] to explore the +context available to a {ref}/query-dsl-script-query.html[script query]. + +[source,console] +--------------------------------------------------------- +PUT /hockey/_doc/1?refresh +{"first":"johnny","last":"gaudreau","goals":[9,27,1],"assists":[17,46,0],"gp":[26,82,1]} + +POST /hockey/_explain/1 +{ + "query": { + "script": { + "script": "Debug.explain(doc.goals)" + } + } +} +--------------------------------------------------------- +// TEST[s/_explain\\/1/_explain\\/1?error_trace=false/ catch:/painless_explain_error/] +// The test system sends error_trace=true by default for easier debugging so +// we have to override it to get a normal shaped response + +Which shows that the class of `doc.first` is +`org.elasticsearch.index.fielddata.ScriptDocValues.Longs` by responding with: + +[source,console-result] +--------------------------------------------------------- +{ + "error": { + "type": "script_exception", + "to_string": "[1, 9, 27]", + "painless_class": "org.elasticsearch.index.fielddata.ScriptDocValues.Longs", + "java_class": "org.elasticsearch.index.fielddata.ScriptDocValues\$Longs", + ... 
+ }, + "status": 400 +} +--------------------------------------------------------- +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.script_stack, "script": \$body.error.script, "lang": \$body.error.lang, "position": \$body.error.position, "caused_by": \$body.error.caused_by, "root_cause": \$body.error.root_cause, "reason": \$body.error.reason/] + +You can use the same trick to see that `_source` is a `LinkedHashMap` +in the `_update` API: + +[source,console] +--------------------------------------------------------- +POST /hockey/_update/1 +{ + "script": "Debug.explain(ctx._source)" +} +--------------------------------------------------------- +// TEST[continued s/_update\\/1/_update\\/1?error_trace=false/ catch:/painless_explain_error/] + +The response looks like: + +[source,console-result] +--------------------------------------------------------- +{ + "error" : { + "root_cause": ..., + "type": "illegal_argument_exception", + "reason": "failed to execute script", + "caused_by": { + "type": "script_exception", + "to_string": "{gp=[26, 82, 1], last=gaudreau, assists=[17, 46, 0], first=johnny, goals=[9, 27, 1]}", + "painless_class": "java.util.LinkedHashMap", + "java_class": "java.util.LinkedHashMap", + ... + } + }, + "status": 400 +} +--------------------------------------------------------- +// TESTRESPONSE[s/"root_cause": \\.\\.\\./"root_cause": \$body.error.root_cause/] +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason/] +// TESTRESPONSE[s/"to_string": ".+"/"to_string": \$body.error.caused_by.to_string/] + +Once you have a class you can go to <> to see a list of +available methods. + +""" + ) + docFile( + "docs/reference/security/authorization/run-as-privilege.asciidoc", + """[role="xpack"] +[[run-as-privilege]] += Submitting requests on behalf of other users + +{es} roles support a `run_as` privilege that enables an authenticated user to +submit requests on behalf of other users. For example, if your external +application is trusted to authenticate users, {es} can authenticate the external +application and use the _run as_ mechanism to issue authorized requests as +other users without having to re-authenticate each user. + +To "run as" (impersonate) another user, the first user (the authenticating user) +must be authenticated by a mechanism that supports run-as delegation. The second +user (the `run_as` user) must be authorized by a mechanism that supports +delegated run-as lookups by username. + +The `run_as` privilege essentially operates like a secondary form of +<>. Delegated authorization applies +to the authenticating user, and the `run_as` privilege applies to the user who +is being impersonated. + +Authenticating user:: +-- +For the authenticating user, the following realms (plus API keys) all support +`run_as` delegation: `native`, `file`, Active Directory, JWT, Kerberos, LDAP and +PKI. + +Service tokens, the {es} Token Service, SAML 2.0, and OIDC 1.0 do not +support `run_as` delegation. +-- + +`run_as` user:: +-- +{es} supports `run_as` for any realm that supports user lookup. +Not all realms support user lookup. Refer to the list of <> +and ensure that the realm you wish to use is configured in a manner that +supports user lookup. + +The `run_as` user must be retrieved from a <> - it is not +possible to run as a +<>, +<> or +<>. 
+--
+
+To submit requests on behalf of other users, you need to have the `run_as`
+privilege in your <>. For example, the following request
+creates a `my_director` role that grants permission to submit requests on behalf
+of `jacknich` or `rdeniro`:
+
+[source,console]
+----
+POST /_security/role/my_director?refresh=true
+{
+  "cluster": ["manage"],
+  "indices": [
+    {
+      "names": [ "index1", "index2" ],
+      "privileges": [ "manage" ]
+    }
+  ],
+  "run_as": [ "jacknich", "rdeniro" ],
+  "metadata" : {
+    "version" : 1
+  }
+}
+----
+
+To submit a request as another user, you specify the user in the
+`es-security-runas-user` request header. For example:
+
+[source,sh]
+----
+curl -H "es-security-runas-user: jacknich" -u es-admin -X GET http://localhost:9200/
+----
+
+The `run_as` user passed in through the `es-security-runas-user` header must be
+available from a realm that supports delegated authorization lookup by username.
+Realms that don't support user lookup can't be used by `run_as` delegation from
+other realms.
+
+For example, JWT realms can authenticate external users specified in JWTs, and
+execute requests as a `run_as` user in the `native` realm. {es} will retrieve the
+indicated `run_as` user and execute the request as that user using their roles.
+
+[[run-as-privilege-apply]]
+== Apply the `run_as` privilege to roles
+You can apply the `run_as` privilege when creating roles with the
+<>. Users who are assigned
+a role that contains the `run_as` privilege inherit all privileges from their
+role, and can also submit requests on behalf of the indicated users.
+
+NOTE: Roles for the authenticated user and the `run_as` user are not merged. If
+a user authenticates without specifying the `run_as` parameter, only the
+authenticated user's roles are used. If a user authenticates and their roles
+include the `run_as` parameter, only the `run_as` user's roles are used.
+
+After a user successfully authenticates to {es}, an authorization process determines whether the user behind an incoming request is allowed to run
+that request. If the authenticated user has the `run_as` privilege in their list
+of permissions and specifies the run-as header, {es} _discards_ the authenticated
+user and associated roles. It then looks in each of the configured realms in the
+realm chain until it finds the username that's associated with the `run_as` user,
+and uses those roles to execute any requests.
+
+Consider an admin role and an analyst role. The admin role has higher privileges,
+but a user with this role might also want to submit requests as another user to
+test and verify their permissions.
+
+First, we'll create an admin role named `my_admin_role`. This role has `manage`
+<> on the entire cluster, and on a subset of
+indices. This role also contains the `run_as` privilege, which enables any user
+with this role to submit requests on behalf of the specified `analyst_user`.
+
+[source,console]
+----
+POST /_security/role/my_admin_role?refresh=true
+{
+  "cluster": ["manage"],
+  "indices": [
+    {
+      "names": [ "index1", "index2" ],
+      "privileges": [ "manage" ]
+    }
+  ],
+  "applications": [
+    {
+      "application": "myapp",
+      "privileges": [ "admin", "read" ],
+      "resources": [ "*" ]
+    }
+  ],
+  "run_as": [ "analyst_user" ],
+  "metadata" : {
+    "version" : 1
+  }
+}
+----
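+
+If you want to double-check the stored role before continuing, one option is to
+fetch it back with the get roles API (an optional sanity check; it simply reuses
+the role name from the example above):
+
+[source,console]
+----
+GET /_security/role/my_admin_role
+----
+
+The response should echo the role definition, including
+`"run_as": [ "analyst_user" ]`.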
+
+Next, we'll create an analyst role named `my_analyst_role`, which has more
+restricted `monitor` cluster privileges and `manage` privileges on a subset of
+indices.
+
+[source,console]
+----
+POST /_security/role/my_analyst_role?refresh=true
+{
+  "cluster": [ "monitor" ],
+  "indices": [
+    {
+      "names": [ "index1", "index2" ],
+      "privileges": [ "manage" ]
+    }
+  ],
+  "applications": [
+    {
+      "application": "myapp",
+      "privileges": [ "read" ],
+      "resources": [ "*" ]
+    }
+  ],
+  "metadata" : {
+    "version" : 1
+  }
+}
+----
+
+We'll create an administrator user and assign them the role named `my_admin_role`,
+which allows this user to submit requests as the `analyst_user`.
+
+[source,console]
+----
+POST /_security/user/admin_user?refresh=true
+{
+  "password": "l0ng-r4nd0m-p@ssw0rd",
+  "roles": [ "my_admin_role" ],
+  "full_name": "Eirian Zola",
+  "metadata": { "intelligence" : 7 }
+}
+----
+
+We can also create an analyst user and assign them the role named
+`my_analyst_role`.
+
+[source,console]
+----
+POST /_security/user/analyst_user?refresh=true
+{
+  "password": "l0nger-r4nd0mer-p@ssw0rd",
+  "roles": [ "my_analyst_role" ],
+  "full_name": "Monday Jaffe",
+  "metadata": { "innovation" : 8 }
+}
+----
+
+You can then authenticate to {es} as the `admin_user` or `analyst_user`. The
+`admin_user` can also submit requests on behalf of the `analyst_user`. The
+following request authenticates to {es} with a `Basic` authorization token and
+submits the request as the `analyst_user`:
+
+[source,sh]
+----
+curl -s -X GET -H "Authorization: Basic YWRtaW5fdXNlcjpsMG5nLXI0bmQwbS1wQHNzdzByZA==" -H "es-security-runas-user: analyst_user" https://localhost:9200/_security/_authenticate
+----
+
+The response indicates that the `analyst_user` submitted this request, using the
+`my_analyst_role` that's assigned to that user. When the `admin_user` submitted
+the request, {es} authenticated that user, discarded their roles, and then used
+the roles of the `run_as` user.
+
+[source,sh]
+----
+{"username":"analyst_user","roles":["my_analyst_role"],"full_name":"Monday Jaffe","email":null,
+"metadata":{"innovation":8},"enabled":true,"authentication_realm":{"name":"native",
+"type":"native"},"lookup_realm":{"name":"native","type":"native"},"authentication_type":"realm"}
+----
+
+The `authentication_realm` and `lookup_realm` in the response both specify
+the `native` realm because both the `admin_user` and `analyst_user` are from
+that realm. If the two users are in different realms, the values for
+`authentication_realm` and `lookup_realm` are different (such as `pki` and
+`native`).
+"""
+        )
+
+    }
+}
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java
deleted file mode 100644
index 534134e78d40b..0000000000000
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.gradle.internal.doc; - -import org.gradle.api.InvalidUserDataException; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import static org.elasticsearch.gradle.internal.doc.RestTestsFromSnippetsTask.replaceBlockQuote; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class RestTestFromSnippetsTaskTests { - @Rule - public ExpectedException expectedEx = ExpectedException.none(); - - @Test - public void testInvalidBlockQuote() { - String input = "\"foo\": \"\"\"bar\""; - expectedEx.expect(InvalidUserDataException.class); - expectedEx.expectMessage("Invalid block quote starting at 7 in:\n" + input); - replaceBlockQuote(input); - } - - @Test - public void testSimpleBlockQuote() { - assertEquals("\"foo\": \"bort baz\"", replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"")); - } - - @Test - public void testMultipleBlockQuotes() { - assertEquals( - "\"foo\": \"bort baz\", \"bar\": \"other\"", - replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"") - ); - } - - @Test - public void testEscapingInBlockQuote() { - assertEquals("\"foo\": \"bort\\\" baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"")); - assertEquals("\"foo\": \"bort\\n baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"")); - } - - @Test - public void testIsDocWriteRequest() { - assertTrue((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("doc-index/_search")); - assertFalse((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_cat")); - assertFalse((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview")); - } -} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java deleted file mode 100644 index 0acae6ca03297..0000000000000 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.gradle.internal.doc; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public class SnippetsTaskTests { - - @Test - public void testMatchSource() { - SnippetsTask.Source source = SnippetsTask.matchSource("[source,console]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertNull(source.getName()); - - source = SnippetsTask.matchSource("[source,console,id=snippet-name-1]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source, console, id=snippet-name-1]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console,attr=5,id=snippet-name-1,attr2=6]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console, attr=5, id=snippet-name-1, attr2=6]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[\"source\",\"console\",id=\"snippet-name-1\"]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console,id=\"snippet-name-1\"]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source.merge.styled,esql]"); - assertTrue(source.getMatches()); - assertEquals("esql", source.getLanguage()); - - source = SnippetsTask.matchSource("[source.merge.styled,foo-bar]"); - assertTrue(source.getMatches()); - assertEquals("foo-bar", source.getLanguage()); - } -} diff --git a/docs/changelog/103374.yaml b/docs/changelog/103374.yaml new file mode 100644 index 0000000000000..fcdee9185eb92 --- /dev/null +++ b/docs/changelog/103374.yaml @@ -0,0 +1,16 @@ +pr: 103374 +summary: Cut over stored fields to ZSTD for compression +area: Search +type: enhancement +issues: [] +highlight: + title: Stored fields are now compressed with ZStandard instead of LZ4/DEFLATE + body: |- + Stored fields are now compressed by splitting documents into blocks, which + are then compressed independently with ZStandard. `index.codec: default` + (default) uses blocks of at most 14kB or 128 documents compressed with level + 0, while `index.codec: best_compression` uses blocks of at most 240kB or + 2048 documents compressed at level 3. On most datasets that we tested + against, this yielded storage improvements in the order of 10%, slightly + faster indexing and similar retrieval latencies. 
+  notable: true
diff --git a/docs/changelog/106514.yaml b/docs/changelog/106514.yaml
new file mode 100644
index 0000000000000..5b25f40db2742
--- /dev/null
+++ b/docs/changelog/106514.yaml
@@ -0,0 +1,6 @@
+pr: 106514
+summary: Add granular error list to alias action response
+area: Indices APIs
+type: feature
+issues:
+ - 94478
diff --git a/docs/changelog/106989.yaml b/docs/changelog/106989.yaml
new file mode 100644
index 0000000000000..47df5fe5b47d7
--- /dev/null
+++ b/docs/changelog/106989.yaml
@@ -0,0 +1,7 @@
+pr: 106989
+summary: Make force-stopping the transform always remove persistent task from cluster
+  state
+area: Transform
+type: bug
+issues:
+ - 106811
diff --git a/docs/changelog/107041.yaml b/docs/changelog/107041.yaml
new file mode 100644
index 0000000000000..b8b4f3d7c5690
--- /dev/null
+++ b/docs/changelog/107041.yaml
@@ -0,0 +1,6 @@
+pr: 107041
+summary: '`DocumentParsingObserver` to accept an `indexName` to allow skipping system
+  indices'
+area: Infra/Metrics
+type: enhancement
+issues: []
diff --git a/docs/changelog/107158.yaml b/docs/changelog/107158.yaml
new file mode 100644
index 0000000000000..9589fe7e7264b
--- /dev/null
+++ b/docs/changelog/107158.yaml
@@ -0,0 +1,5 @@
+pr: 107158
+summary: "ESQL: allow sorting by expressions and not only regular fields"
+area: ES|QL
+type: feature
+issues: []
diff --git a/docs/changelog/107232.yaml b/docs/changelog/107232.yaml
new file mode 100644
index 0000000000000..1422848cb1c91
--- /dev/null
+++ b/docs/changelog/107232.yaml
@@ -0,0 +1,6 @@
+pr: 107232
+summary: Only trigger action once per thread
+area: Transform
+type: bug
+issues:
+ - 107215
diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc
index 6ddd3602e1467..5b30501ed7c9d 100644
--- a/docs/reference/alias.asciidoc
+++ b/docs/reference/alias.asciidoc
@@ -121,6 +121,77 @@ POST _aliases
 // TEST[s/^/PUT _data_stream\/logs-nginx.access-prod\nPUT _data_stream\/logs-my_app-default\n/]
 // end::alias-multiple-actions-example[]
 
+[discrete]
+[[multiple-action-results]]
+=== Multiple action results
+
+When using multiple actions, if some succeed and some fail, the response
+returns a list of per-action results.
+
+Consider an action list similar to the previous example, but now with an alias
+`logs-non-existing`, which does not yet exist.
+In this case, the `remove` action will fail, but the `add` action will succeed.
+The response will contain the list `action_results`, with a result for every
+requested action.
+
+[source,console]
+----
+POST _aliases
+{
+  "actions": [
+    {
+      "remove": {
+        "index": "index1",
+        "alias": "logs-non-existing"
+      }
+    },
+    {
+      "add": {
+        "index": "index2",
+        "alias": "logs-non-existing"
+      }
+    }
+  ]
+}
+----
+// TEST[s/^/PUT \/index1\nPUT \/index2\n/]
+
+The API returns the following result:
+
+[source,console-result]
+--------------------------------------------------
+{
+  "acknowledged": true,
+  "errors": true,
+  "action_results": [
+    {
+      "action": {
+        "type": "remove",
+        "indices": [ "index1" ],
+        "aliases": [ "logs-non-existing" ]
+      },
+      "status": 404,
+      "error": {
+        "type": "aliases_not_found_exception",
+        "reason": "aliases [logs-non-existing] missing",
+        "resource.type": "aliases",
+        "resource.id": "logs-non-existing"
+      }
+    },
+    {
+      "action": {
+        "type": "add",
+        "indices": [ "index2" ],
+        "aliases": [ "logs-non-existing" ]
+      },
+      "status": 200
+    }
+  ]
+}
+--------------------------------------------------
+
+Allowing the action list to succeed partially may not provide the desired result.
+It may be more appropriate to set `must_exist` to `true`, which will cause the
+entire action list to fail if a single action fails.
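+
+For example, here is the same action list again with `must_exist` added to the
+`remove` action (a sketch that reuses the hypothetical indices and alias from
+the example above):
+
+[source,console]
+----
+POST _aliases
+{
+  "actions": [
+    {
+      "remove": {
+        "index": "index1",
+        "alias": "logs-non-existing",
+        "must_exist": true
+      }
+    },
+    {
+      "add": {
+        "index": "index2",
+        "alias": "logs-non-existing"
+      }
+    }
+  ]
+}
+----
+
+Because the alias does not exist, the `remove` action fails, the entire request
+fails, and the `add` action is not applied.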
+
 
 [discrete]
 [[add-alias-at-creation]]
 === Add an alias at index creation
diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc
index a055c278b41d9..1a32e64cedb1f 100644
--- a/docs/reference/docs/bulk.asciidoc
+++ b/docs/reference/docs/bulk.asciidoc
@@ -414,9 +414,7 @@ This parameter is only returned for successful actions.
 
 `result`::
 (string) Result of the operation. Successful values are `created`, `deleted`, and
-`updated`.
-+
-This parameter is only returned for successful operations.
+`updated`. Other valid values are `noop` and `not_found`.
 
 `_shards`::
 (object)
diff --git a/docs/reference/esql/esql-functions-operators.asciidoc b/docs/reference/esql/esql-functions-operators.asciidoc
index a1ad512fbe512..ddc077f3b8ff8 100644
--- a/docs/reference/esql/esql-functions-operators.asciidoc
+++ b/docs/reference/esql/esql-functions-operators.asciidoc
@@ -1,40 +1,71 @@
 [[esql-functions-operators]]
 === {esql} functions and operators
-
 ++++
 Functions and operators
 ++++
 
 {esql} provides a comprehensive set of functions and operators for working with data.
-The functions are divided into the following categories:
+The reference documentation is divided into the following categories:
 
 [[esql-functions]]
-<>::
+==== Functions overview
+
+.*Aggregate functions*
+[%collapsible]
+====
 include::functions/aggregation-functions.asciidoc[tag=agg_list]
+====
 
-<>::
+.*Math functions*
+[%collapsible]
+====
 include::functions/math-functions.asciidoc[tag=math_list]
+====
 
-<>::
+.*String functions*
+[%collapsible]
+====
 include::functions/string-functions.asciidoc[tag=string_list]
+====
 
-<>::
+.*Date and time functions*
+[%collapsible]
+====
 include::functions/date-time-functions.asciidoc[tag=date_list]
+====
 
-<>::
+.*Spatial functions*
+[%collapsible]
+====
 include::functions/spatial-functions.asciidoc[tag=spatial_list]
+====
 
-<>::
+.*Type conversion functions*
+[%collapsible]
+====
 include::functions/type-conversion-functions.asciidoc[tag=type_list]
+====
 
-<>::
+.*Conditional functions and expressions*
+[%collapsible]
+====
 include::functions/conditional-functions-and-expressions.asciidoc[tag=cond_list]
+====
 
-<>::
+.*Multi value functions*
+[%collapsible]
+====
 include::functions/mv-functions.asciidoc[tag=mv_list]
+====
+
+[[esql-operators-overview]]
+==== Operators overview
 
-<>::
+.*Operators*
+[%collapsible]
+====
 include::functions/operators.asciidoc[tag=op_list]
+====
 
 include::functions/aggregation-functions.asciidoc[]
 include::functions/math-functions.asciidoc[]
diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc
index 29f61299cec30..421272f741602 100644
--- a/docs/reference/esql/esql-get-started.asciidoc
+++ b/docs/reference/esql/esql-get-started.asciidoc
@@ -240,7 +240,7 @@ include::{esql-specs}/eval.csv-spec[tag=gs-eval-stats-backticks]
 === Create a histogram
 
 To track statistics over time, {esql} enables you to create histograms using the
-<> function. `AUTO_BUCKET` creates human-friendly bucket sizes
+<> function. `BUCKET` creates human-friendly bucket sizes
 and returns a value for each row that corresponds to the resulting bucket the
 row falls into.
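+
+As a minimal inline sketch of the idea (the tagged includes that follow are the
+tested snippets; this one assumes the guide's `sample_data` index and its
+`@timestamp` field):
+
+[source,esql]
+----
+FROM sample_data
+| STATS event_count = COUNT(*) BY hour = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z")
+| SORT hour
+----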
@@ -248,22 +248,22 @@ For example, to create hourly buckets for the data on October 23rd: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket] +include::{esql-specs}/date.csv-spec[tag=gs-bucket] ---- -Combine `AUTO_BUCKET` with <> to create a histogram. For example, +Combine `BUCKET` with <> to create a histogram. For example, to count the number of events per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by] +include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by] ---- Or the median duration per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by-median] +include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by-median] ---- [discrete] diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc index e4c873457b21b..77f5e79753fdd 100644 --- a/docs/reference/esql/esql-language.asciidoc +++ b/docs/reference/esql/esql-language.asciidoc @@ -1,11 +1,10 @@ [[esql-language]] -== Learning {esql} - +== {esql} reference ++++ -Learning {esql} +{esql} reference ++++ -Detailed information about the {esql} language: +Detailed reference documentation for the {esql} language: * <> * <> diff --git a/docs/reference/esql/functions/README.md b/docs/reference/esql/functions/README.md index 7be4c70fbe6b0..35b852ba060f1 100644 --- a/docs/reference/esql/functions/README.md +++ b/docs/reference/esql/functions/README.md @@ -5,6 +5,8 @@ The files in these subdirectories are generated by ESQL's test suite: * `signature` - railroad diagram of the syntax to invoke each function * `types` - a table of each combination of support type for each parameter. These are generated from tests. * `layout` - a fully generated description for each function +* `kibana/definition` - function definitions for kibana's ESQL editor +* `kibana/docs` - the inline docs for kibana Most functions can use the generated docs generated in the `layout` directory. If we need something more custom for the function we can make a file in this diff --git a/docs/reference/esql/functions/auto_bucket.asciidoc b/docs/reference/esql/functions/bucket.asciidoc similarity index 62% rename from docs/reference/esql/functions/auto_bucket.asciidoc rename to docs/reference/esql/functions/bucket.asciidoc index 651ac168aa83a..e436a79d0ec1e 100644 --- a/docs/reference/esql/functions/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/bucket.asciidoc @@ -1,14 +1,12 @@ [discrete] -[[esql-auto_bucket]] -=== `AUTO_BUCKET` - -experimental::[] +[[esql-bucket]] +=== `BUCKET` *Syntax* [source,esql] ---- -AUTO_BUCKET(expression, buckets, from, to) +BUCKET(expression, buckets, from, to) ---- *Parameters* @@ -28,39 +26,39 @@ End of the range. Can be a number or a date expressed as a string. *Description* Creates human-friendly buckets and returns a value for each row that corresponds -to the resulting bucket the row falls into. +to the resulting bucket the row falls into. Using a target number of buckets, a start of a range, and an end of a range, -`AUTO_BUCKET` picks an appropriate bucket size to generate the target number of +`BUCKET` picks an appropriate bucket size to generate the target number of buckets or fewer. 
For example, asking for at most 20 buckets over a year results in monthly buckets:
 
 [source.merge.styled,esql]
 ----
-include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth]
+include::{esql-specs}/date.csv-spec[tag=docsBucketMonth]
 ----
 [%header.monospaced.styled,format=dsv,separator=|]
 |===
-include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth-result]
+include::{esql-specs}/date.csv-spec[tag=docsBucketMonth-result]
 |===
 
 The goal isn't to provide *exactly* the target number of buckets; it's to pick a
 range that people are comfortable with and that provides at most the target
 number of buckets.
 
-Combine `AUTO_BUCKET` with
+Combine `BUCKET` with
 <> to create a histogram:
 
 [source.merge.styled,esql]
 ----
-include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram]
+include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram]
 ----
 [%header.monospaced.styled,format=dsv,separator=|]
 |===
-include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram-result]
+include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram-result]
 |===
 
-NOTE: `AUTO_BUCKET` does not create buckets that don't match any documents.
+NOTE: `BUCKET` does not create buckets that don't match any documents.
 That's why this example is missing `1985-03-01` and other dates.
 
 Asking for more buckets can result in a smaller range. For example, asking for
@@ -68,28 +66,28 @@ at most 100 buckets in a year results in weekly buckets:
 
 [source.merge.styled,esql]
 ----
-include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram]
+include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram]
 ----
 [%header.monospaced.styled,format=dsv,separator=|]
 |===
-include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram-result]
+include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram-result]
 |===
 
-NOTE: `AUTO_BUCKET` does not filter any rows. It only uses the provided range to
+NOTE: `BUCKET` does not filter any rows. It only uses the provided range to
 pick a good bucket size. For rows with a value outside of the range, it returns
 a bucket value that corresponds to a bucket outside the range. Combine
-`AUTO_BUCKET` with <> to filter rows.
+`BUCKET` with <> to filter rows.
 
-`AUTO_BUCKET` can also operate on numeric fields. For example, to create a
+`BUCKET` can also operate on numeric fields.
For example, to create a salary histogram: [source.merge.styled,esql] ---- -include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric] +include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric-result] +include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric-result] |=== Unlike the earlier example that intentionally filters on a date range, you @@ -104,7 +102,7 @@ per hour: [source.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketLast24hr] +include::{esql-specs}/date.csv-spec[tag=docsBucketLast24hr] ---- Create monthly buckets for the year 1985, and calculate the average salary by @@ -112,9 +110,9 @@ hiring month: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg] +include::{esql-specs}/date.csv-spec[tag=bucket_in_agg] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg-result] +include::{esql-specs}/date.csv-spec[tag=bucket_in_agg-result] |=== diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index e9d6628c63894..149bdffb5ef07 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -8,7 +8,7 @@ {esql} supports these date-time functions: // tag::date_list[] -* experimental:[] <> +* <> * <> * <> * <> @@ -17,10 +17,10 @@ * <> // end::date_list[] -include::auto_bucket.asciidoc[] +include::bucket.asciidoc[] include::date_diff.asciidoc[] include::date_extract.asciidoc[] include::date_format.asciidoc[] include::date_parse.asciidoc[] -include::date_trunc.asciidoc[] +include::layout/date_trunc.asciidoc[] include::now.asciidoc[] diff --git a/docs/reference/esql/functions/description/auto_bucket.asciidoc b/docs/reference/esql/functions/description/bucket.asciidoc similarity index 100% rename from docs/reference/esql/functions/description/auto_bucket.asciidoc rename to docs/reference/esql/functions/description/bucket.asciidoc diff --git a/docs/reference/esql/functions/description/date_trunc.asciidoc b/docs/reference/esql/functions/description/date_trunc.asciidoc new file mode 100644 index 0000000000000..1fb874e3bd9cd --- /dev/null +++ b/docs/reference/esql/functions/description/date_trunc.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Rounds down a date to the closest interval. diff --git a/docs/reference/esql/functions/date_trunc.asciidoc b/docs/reference/esql/functions/examples/date_trunc.asciidoc similarity index 68% rename from docs/reference/esql/functions/date_trunc.asciidoc rename to docs/reference/esql/functions/examples/date_trunc.asciidoc index 4aa228dc14e65..d7cece9aff58b 100644 --- a/docs/reference/esql/functions/date_trunc.asciidoc +++ b/docs/reference/esql/functions/examples/date_trunc.asciidoc @@ -1,26 +1,4 @@ -[discrete] -[[esql-date_trunc]] -=== `DATE_TRUNC` - -*Syntax* - -[source,esql] ----- -DATE_TRUNC(interval, date) ----- - -*Parameters* - -`interval`:: -Interval, expressed using the <>. If `null`, the function returns `null`. - -`date`:: -Date expression. If `null`, the function returns `null`. - -*Description* - -Rounds down a date to the closest interval. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. *Examples* @@ -32,10 +10,8 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTrunc] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTrunc-result] |=== - Combine `DATE_TRUNC` with <> to create date histograms. For example, the number of hires per year: - [source.merge.styled,esql] ---- include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] @@ -44,9 +20,7 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram-result] |=== - Or an hourly error rate: - [source.merge.styled,esql] ---- include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] @@ -55,3 +29,4 @@ include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] |=== include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] |=== + diff --git a/docs/reference/esql/functions/kibana/definition/abs.json b/docs/reference/esql/functions/kibana/definition/abs.json new file mode 100644 index 0000000000000..82c3c205d7512 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/abs.json @@ -0,0 +1,60 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "abs", + "description" : "Returns the absolute value.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "ROW number = -1.0 \n| EVAL abs_number = ABS(number)", + "FROM employees\n| KEEP first_name, last_name, height\n| EVAL abs_height = ABS(0.0 - height)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/acos.json b/docs/reference/esql/functions/kibana/definition/acos.json new file mode 100644 index 0000000000000..6a6ab59278639 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/acos.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "acos", + "description" : "Returns the arccosine of `n` as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=.9\n| EVAL acos=ACOS(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/asin.json b/docs/reference/esql/functions/kibana/definition/asin.json new file mode 100644 index 0000000000000..f5ebb817fff33 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/asin.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "asin", + "description" : "Returns the arcsine of the input\nnumeric expression as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=.9\n| EVAL asin=ASIN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/atan.json b/docs/reference/esql/functions/kibana/definition/atan.json new file mode 100644 index 0000000000000..654a48b8ca76d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/atan.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "atan", + "description" : "Returns the arctangent of the input\nnumeric expression as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=12.9\n| EVAL atan=ATAN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/atan2.json b/docs/reference/esql/functions/kibana/definition/atan2.json new file mode 100644 index 0000000000000..63940831241f7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/atan2.json @@ -0,0 +1,299 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "atan2", + "description" : "The angle between the positive x-axis and the ray from the\norigin to the point (x , y) in the Cartesian plane, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." 
+ }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." 
+ }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW y=12.9, x=.6\n| EVAL atan2=ATAN2(y, x)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json new file mode 100644 index 0000000000000..dda3f384424b4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -0,0 +1,1088 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "bucket", + "description" : "Creates human-friendly buckets and returns a datetime value\nfor each row that corresponds to the resulting bucket the row falls into.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "text", + 
"optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" 
: "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" 
+ }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + 
"description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json new file mode 100644 index 0000000000000..73bc215ac6ade --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "case", + "description" : "Accepts pairs of conditions and values.\nThe function returns the value that belongs to the first condition that evaluates to true.", + "signatures" : [ + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "trueValue", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "falseValue", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ceil.json b/docs/reference/esql/functions/kibana/definition/ceil.json new file mode 100644 index 0000000000000..b8ac9ad55f31a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ceil.json @@ -0,0 +1,60 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ceil", + "description" : "Round a number up to the nearest integer.", + "note" : "This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to Math.ceil.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "ROW a=1.8\n| EVAL a=CEIL(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json new file mode 100644 index 0000000000000..87feead06d091 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -0,0 +1,161 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "coalesce", + "description" : "Returns the first of its arguments that is not null. 
If all arguments are null, it returns `null`.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "text" + } + ], + "examples" : [ + "ROW a=null, b=\"b\"\n| EVAL COALESCE(a, b)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/concat.json b/docs/reference/esql/functions/kibana/definition/concat.json new file mode 100644 index 0000000000000..bb1b84f67aff9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/concat.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "concat", + "description" : "Concatenates two or more strings.", + "signatures" : [ + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/cos.json b/docs/reference/esql/functions/kibana/definition/cos.json new file mode 100644 index 0000000000000..c7757fbd4071d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/cos.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "cos", + "description" : "Returns the cosine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL cos=COS(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/cosh.json b/docs/reference/esql/functions/kibana/definition/cosh.json new file mode 100644 index 0000000000000..a34eee15be37e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/cosh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "cosh", + "description" : "Returns the hyperbolic cosine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
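Both `cos` and `cosh` take the angle in radians, per the parameter descriptions above. An illustrative query (hypothetical values; the degrees-to-radians conversion is done inline with `PI()`):

ROW deg = 90.0
| EVAL rad = deg * PI() / 180.0, c = COS(rad), ch = COSH(rad)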
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL cosh=COSH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_diff.json b/docs/reference/esql/functions/kibana/definition/date_diff.json new file mode 100644 index 0000000000000..aa030ea163709 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_diff.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_diff", + "description" : "Subtracts two dates and returns their difference in multiples of a unit specified in the first argument.", + "signatures" : [ + { + "params" : [ + { + "name" : "unit", + "type" : "keyword", + "optional" : false, + "description" : "A valid date unit" + }, + { + "name" : "startTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + "optional" : false, + "description" : "A valid date unit" + }, + { + "name" : "startTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_extract.json b/docs/reference/esql/functions/kibana/definition/date_extract.json new file mode 100644 index 0000000000000..c5edf5ac14109 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_extract.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_extract", + "description" : "Extracts parts of a date, like year, month, day, hour.", + "signatures" : [ + { + "params" : [ + { + "name" : "datePart", + "type" : "keyword", + "optional" : false, + "description" : "Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "datePart", + "type" : "text", + "optional" : false, + "description" : "Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_format.json b/docs/reference/esql/functions/kibana/definition/date_format.json new file mode 100644 index 0000000000000..8807e5d330f84 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_format.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_format", + "description" : "Returns a string representation of a date, in the provided format.", + "signatures" : [ + { + "params" : [ + { + "name" : "dateFormat", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "dateFormat", + "type" : "text", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_parse.json b/docs/reference/esql/functions/kibana/definition/date_parse.json new file mode 100644 index 0000000000000..85bce19532020 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_parse.json @@ -0,0 +1,62 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_parse", + "description" : "Parses a string into a date value.", + "signatures" : [ + { + "params" : [ + { + "name" : "datePattern", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "keyword", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "datePattern", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "text", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "datePattern", + "type" : "text", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "text", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_trunc.json b/docs/reference/esql/functions/kibana/definition/date_trunc.json new file mode 100644 index 0000000000000..3d8658c496529 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_trunc.json @@ -0,0 +1,49 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_trunc", + "description" : "Rounds down a date to the closest interval.", + "signatures" : [ + { + "params" : [ + { + "name" : "interval", + "type" : "date_period", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "interval", + "type" : "time_duration", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ], + "examples" : [ + "FROM employees\n| KEEP first_name, last_name, hire_date\n| EVAL year_hired = DATE_TRUNC(1 year, hire_date)", + "FROM employees\n| EVAL year = DATE_TRUNC(1 year, hire_date)\n| STATS hires = COUNT(emp_no) BY year\n| SORT year", + "FROM sample_data\n| EVAL error = CASE(message LIKE \"*error*\", 1, 0)\n| EVAL hour = DATE_TRUNC(1 hour, @timestamp)\n| STATS error_rate = AVG(error) by hour\n| SORT hour" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/e.json b/docs/reference/esql/functions/kibana/definition/e.json new file mode 100644 index 0000000000000..97d33b752d042 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/e.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "e", + "description" : "Euler’s number.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ends_with.json b/docs/reference/esql/functions/kibana/definition/ends_with.json new file mode 100644 index 0000000000000..66f4c7404905c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ends_with.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ends_with", + "description" : "Returns a boolean that indicates whether a keyword string ends with another string.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "suffix", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "suffix", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/floor.json b/docs/reference/esql/functions/kibana/definition/floor.json new file mode 100644 index 0000000000000..18ab8031558bd --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/floor.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "floor", + "description" : "Round a number down to the nearest integer.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json new file mode 100644 index 0000000000000..f72f54708c6b1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -0,0 +1,212 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "greatest", + "description" : "Returns the maximum value from many columns.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "double", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "ip", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "ip", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json new file mode 100644 index 0000000000000..66efedc0c9fe5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -0,0 +1,212 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "least", + "description" : "Returns the minimum value from many columns.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "double", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "ip", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "ip", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/left.json b/docs/reference/esql/functions/kibana/definition/left.json new file mode 100644 index 0000000000000..bcda92b887bb0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/left.json @@ -0,0 +1,47 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "left", + "description" : "Returns the substring that extracts 'length' chars from 'string' starting from the left.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "The string from which to return a substring." + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "The number of characters to return." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "The string from which to return a substring." + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "The number of characters to return." + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ], + "examples" : [ + "FROM employees\n| KEEP last_name\n| EVAL left = LEFT(last_name, 3)\n| SORT last_name ASC\n| LIMIT 5" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/length.json b/docs/reference/esql/functions/kibana/definition/length.json new file mode 100644 index 0000000000000..a42656b71d471 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/length.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "length", + "description" : "Returns the character length of a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/locate.json b/docs/reference/esql/functions/kibana/definition/locate.json new file mode 100644 index 0000000000000..9629b81820f8a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/locate.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "locate", + "description" : "Returns an integer that indicates the position of a keyword substring within another string", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/log.json b/docs/reference/esql/functions/kibana/definition/log.json new file mode 100644 index 0000000000000..0edafefc4dd1a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/log.json @@ -0,0 +1,348 @@ +{ + "comment" : "This is generated 
by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "log", + "description" : "Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double.\n\nLogs of zero, negative numbers, and base of one return `null` as well as a warning.", + "signatures" : [ + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." 
+ }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." 
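Per the `log` description above, logs of zero, negative numbers, and a base of one return `null` and emit a warning rather than failing the query. A minimal sketch of that edge case (hypothetical value):

ROW v = -1.0
| EVAL s = LOG(v)
// s is null and a warning is emitted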
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW base = 2.0, value = 8.0\n| EVAL s = LOG(base, value)", + "row value = 100\n| EVAL s = LOG(value);" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/log10.json b/docs/reference/esql/functions/kibana/definition/log10.json new file mode 100644 index 0000000000000..ca506b0df33e2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/log10.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "log10", + "description" : "Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double.\n\nLogs of 0 and negative numbers return `null` as well as a warning.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
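Given the signatures here and in `log` above, `LOG10(x)` should match the two-argument form `LOG(10, x)`; both return a `double`. An illustrative comparison (hypothetical value):

ROW d = 1000.0
| EVAL s1 = LOG10(d), s2 = LOG(10, d)
// both evaluate to 3.0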
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW d = 1000.0 \n| EVAL s = LOG10(d)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ltrim.json b/docs/reference/esql/functions/kibana/definition/ltrim.json new file mode 100644 index 0000000000000..bcf51f6b9e9fb --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ltrim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ltrim", + "description" : "Removes leading whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_avg.json b/docs/reference/esql/functions/kibana/definition/mv_avg.json new file mode 100644 index 0000000000000..2fa14f0c91d51 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_avg.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_avg", + "description" : "Converts a multivalued field into a single valued field containing the average of all of the values.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_concat.json b/docs/reference/esql/functions/kibana/definition/mv_concat.json new file mode 100644 index 0000000000000..1f6936857bcff --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_concat.json @@ -0,0 +1,80 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_concat", + "description" : "Reduce a multivalued string field to a single valued field by concatenating all values.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_count.json b/docs/reference/esql/functions/kibana/definition/mv_count.json new file mode 100644 index 0000000000000..d27821451899b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_count.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_count", + "description" : "Reduce a multivalued field to a single valued field containing the count of values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : 
"field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json new file mode 100644 index 0000000000000..c0f02d9febc42 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json @@ -0,0 +1,116 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_dedupe", + "description" : "Remove duplicate values from a multivalued field.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json new file mode 100644 index 0000000000000..d73b3ae002be3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_first.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_first", + "description" : "Reduce a multivalued field to a single valued field containing the first value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json new file mode 100644 index 0000000000000..0484bfa0b488b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_last.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_last", + "description" : "Reduce a multivalued field to a single valued field containing the last value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json new file mode 100644 index 0000000000000..62a6e15f3346a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_max.json @@ -0,0 +1,128 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
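Before the generated mv_max definition, a hypothetical usage sketch (values illustrative):

ROW a = [3, 5, 1]
| EVAL max_a = MV_MAX(a)
// max_a is 5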
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_max", + "description" : "Reduce a multivalued field to a single valued field containing the maximum value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_median.json b/docs/reference/esql/functions/kibana/definition/mv_median.json new file mode 100644 index 0000000000000..a6d79f7e6f0a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_median.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
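A hypothetical sketch for the mv_median definition below (numbers illustrative):

ROW a = [3, 5, 1]
| EVAL median_a = MV_MEDIAN(a)
// median_a is 3, the middle value of the sorted list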
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_median", + "description" : "Converts a multivalued field into a single valued field containing the median value.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json new file mode 100644 index 0000000000000..8a6f485aedc57 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_min.json @@ -0,0 +1,128 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_min", + "description" : "Reduce a multivalued field to a single valued field containing the minimum value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json new file mode 100644 index 0000000000000..6d3aa873d8d01 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json @@ -0,0 +1,320 @@ +{ + "comment" : "This is generated by ESQL's 
AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_slice", + "description" : "Returns a subset of the multivalued field using the start and end index values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ 
+ { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json new file mode 100644 index 0000000000000..f647d51a2cfaf --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json @@ -0,0 +1,170 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_sort", + "description" : "Sorts a multivalued field in lexicographical order.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_sum.json b/docs/reference/esql/functions/kibana/definition/mv_sum.json new file mode 100644 index 0000000000000..25f687efed675 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_sum.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
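A hypothetical sketch for the mv_sum definition below:

ROW a = [3, 5, 6]
| EVAL sum_a = MV_SUM(a)
// sum_a is 14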
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_sum", + "description" : "Converts a multivalued field into a single valued field containing the sum of all of the values.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_zip.json b/docs/reference/esql/functions/kibana/definition/mv_zip.json new file mode 100644 index 0000000000000..7fabc0e56f12d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_zip.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_zip", + "description" : "Combines the values from two multivalued fields with a delimiter that joins them together.", + "signatures" : [ + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : true, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "delim", + "type" : "text", + "optional" : true, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/pi.json b/docs/reference/esql/functions/kibana/definition/pi.json new file mode 100644 index 0000000000000..d1d700d2011ee --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/pi.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "pi", + "description" : "The ratio of a circle’s circumference to its diameter.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/pow.json b/docs/reference/esql/functions/kibana/definition/pow.json new file mode 100644 index 0000000000000..9970a45847cc7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/pow.json @@ -0,0 +1,296 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
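Ahead of the generated pow definition, a hypothetical sketch; note in the signatures below that every base/exponent type combination returns a double:

ROW base = 2.0, exponent = 3
| EVAL p = POW(base, exponent)
// p is 8.0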
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "pow", + "description" : "Returns the value of a base raised to the power of an exponent.", + "signatures" : [ + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : 
false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/replace.json b/docs/reference/esql/functions/kibana/definition/replace.json new file mode 100644 index 0000000000000..cf54b296555a4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/replace.json @@ -0,0 +1,200 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "replace", + "description" : "The function substitutes in the string any match of the regular expression with the replacement string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : 
"newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/right.json b/docs/reference/esql/functions/kibana/definition/right.json new file mode 100644 index 0000000000000..58d081c3782bf --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/right.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "right", + "description" : "Return the substring that extracts length chars from the string starting from the right.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/round.json b/docs/reference/esql/functions/kibana/definition/round.json new file mode 100644 index 0000000000000..e12672d8ee6e2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/round.json @@ -0,0 +1,26 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "round", + "description" : "Rounds a number to the closest number with the specified number of digits.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "The numeric value to round" + }, + { + "name" : "decimals", + "type" : "integer", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0." + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/rtrim.json b/docs/reference/esql/functions/kibana/definition/rtrim.json new file mode 100644 index 0000000000000..586d53a3f84da --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/rtrim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "rtrim", + "description" : "Removes trailing whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/signum.json b/docs/reference/esql/functions/kibana/definition/signum.json new file mode 100644 index 0000000000000..b8343283f457e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/signum.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "signum", + "description" : "Returns the sign of the given number.\nIt returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW d = 100.0\n| EVAL s = SIGNUM(d)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sin.json b/docs/reference/esql/functions/kibana/definition/sin.json new file mode 100644 index 0000000000000..8d092bd0c15a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sin.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sin", + "description" : "Returns the Sine trigonometric function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`."
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL sin=SIN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sinh.json b/docs/reference/esql/functions/kibana/definition/sinh.json new file mode 100644 index 0000000000000..2261b18134f6c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sinh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sinh", + "description" : "Returns the hyperbolic sine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL sinh=SINH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/split.json b/docs/reference/esql/functions/kibana/definition/split.json new file mode 100644 index 0000000000000..b64def1b813fc --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/split.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "split", + "description" : "Split a single valued string into multiple strings.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sqrt.json b/docs/reference/esql/functions/kibana/definition/sqrt.json new file mode 100644 index 0000000000000..6036fcfd113f3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sqrt.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
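A hypothetical sketch for the sqrt definition below, in the same style as the generated examples:

ROW d = 100.0
| EVAL s = SQRT(d)
// s is 10.0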
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sqrt", + "description" : "Returns the square root of a number.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json new file mode 100644 index 0000000000000..f4f8003917908 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_contains.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_contains", + "description" : "Returns whether the first geometry contains the second geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, 
+ "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE(\"POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json new file mode 100644 index 0000000000000..98647b63ff18f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_disjoint", + "description" : "Returns whether the two geometries or geometry columns are disjoint.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + 
"returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE(\"POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json new file mode 100644 index 0000000000000..ba619fe57ecf5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_intersects", + "description" : "Returns whether the two geometries or geometry columns intersect.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + 
"description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airports\n| WHERE ST_INTERSECTS(location, TO_GEOSHAPE(\"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\"))" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json new file mode 100644 index 0000000000000..ee98337441ab7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_within.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_within", + "description" : "Returns whether the first geometry is within the second geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE(\"POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json new 
file mode 100644 index 0000000000000..57598b3470e11 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_x.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_x", + "description" : "Extracts the x-coordinate from a point geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "point", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "point", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json new file mode 100644 index 0000000000000..0dacaa56bb8de --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_y.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_y", + "description" : "Extracts the y-coordinate from a point geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "point", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "point", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/starts_with.json b/docs/reference/esql/functions/kibana/definition/starts_with.json new file mode 100644 index 0000000000000..918940d110651 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/starts_with.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "starts_with", + "description" : "Returns a boolean that indicates whether a keyword string starts with another string", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "prefix", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "prefix", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/substring.json b/docs/reference/esql/functions/kibana/definition/substring.json new file mode 100644 index 0000000000000..89c62258f4516 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/substring.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
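Ahead of the generated substring definition, a hypothetical sketch (start is 1-based and length is optional, per the signatures below):

ROW message = "some text"
| EVAL prefix = SUBSTRING(message, 1, 4)
// prefix is "some"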
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "substring", + "description" : "Returns a substring of a string, specified by a start position and an optional length", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tan.json b/docs/reference/esql/functions/kibana/definition/tan.json new file mode 100644 index 0000000000000..7498964dc1a2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tan.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tan", + "description" : "Returns the Tangent trigonometric function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL tan=TAN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tanh.json b/docs/reference/esql/functions/kibana/definition/tanh.json new file mode 100644 index 0000000000000..507f62d394be3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tanh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tanh", + "description" : "Returns the Tangent hyperbolic function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL tanh=TANH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tau.json b/docs/reference/esql/functions/kibana/definition/tau.json new file mode 100644 index 0000000000000..6ad20f86be4de --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tau.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tau", + "description" : "The ratio of a circle’s circumference to its radius.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_boolean.json b/docs/reference/esql/functions/kibana/definition/to_boolean.json new file mode 100644 index 0000000000000..314df3f7a4ca9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_boolean.json @@ -0,0 +1,92 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_boolean", + "description" : "Converts an input value to a boolean value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json new file mode 100644 index 0000000000000..59b0c0b38f850 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_cartesianpoint", + "description" : "Converts an input value to a point value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json new file mode 100644 index 0000000000000..75c1f05bd7738 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_cartesianshape", + "description" : "Converts an input value to a shape value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_datetime.json b/docs/reference/esql/functions/kibana/definition/to_datetime.json new file mode 100644 index 0000000000000..e2b10e54f4a29 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_datetime.json @@ -0,0 +1,92 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_datetime", + "description" : "Converts an input value to a date value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_degrees.json b/docs/reference/esql/functions/kibana/definition/to_degrees.json new file mode 100644 index 0000000000000..7652254fcebe1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_degrees.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_degrees", + "description" : "Converts a number in radians to degrees.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_double.json b/docs/reference/esql/functions/kibana/definition/to_double.json new file mode 100644 index 0000000000000..7fad85d7be129 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_double.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_double", + "description" : "Converts an input value to a double value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_geopoint.json b/docs/reference/esql/functions/kibana/definition/to_geopoint.json new file mode 100644 index 0000000000000..b8a7ca9b9a19f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_geopoint.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_geopoint", + "description" : "Converts an input value to a geo_point value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_geoshape.json b/docs/reference/esql/functions/kibana/definition/to_geoshape.json new file mode 100644 index 0000000000000..d3dee5812510c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_geoshape.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_geoshape", + "description" : "Converts an input value to a geo_shape value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_integer.json b/docs/reference/esql/functions/kibana/definition/to_integer.json new file mode 100644 index 0000000000000..3e8a7897bda7b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_integer.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_integer", + "description" : "Converts an input value to an integer value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_ip.json b/docs/reference/esql/functions/kibana/definition/to_ip.json new file mode 100644 index 0000000000000..f99ef65752559 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_ip.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_ip", + "description" : "Converts an input string to an IP value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_long.json b/docs/reference/esql/functions/kibana/definition/to_long.json new file mode 100644 index 0000000000000..56fd5dc83e721 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_long.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_long", + "description" : "Converts an input value to a long value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_lower.json b/docs/reference/esql/functions/kibana/definition/to_lower.json new file mode 100644 index 0000000000000..4b3121da437ed --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_lower.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_lower", + "description" : "Returns a new string representing the input string converted to lower case.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_radians.json b/docs/reference/esql/functions/kibana/definition/to_radians.json new file mode 100644 index 0000000000000..8b8fc287318ab --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_radians.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_radians", + "description" : "Converts a number in degrees to radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_string.json b/docs/reference/esql/functions/kibana/definition/to_string.json new file mode 100644 index 0000000000000..bb77c68bf59e4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_string.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_string", + "description" : "Converts a field into a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json new file mode 100644 index 0000000000000..923294c19ffba --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_unsigned_long", + "description" : "Converts an input value to an unsigned long value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_upper.json b/docs/reference/esql/functions/kibana/definition/to_upper.json new file mode 100644 index 0000000000000..d5ecb1f47206f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_upper.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_upper", + "description" : "Returns a new string representing the input string converted to upper case.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_version.json b/docs/reference/esql/functions/kibana/definition/to_version.json new file mode 100644 index 0000000000000..6076f8dfd70c0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_version.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_version", + "description" : "Converts an input string to a version value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/trim.json b/docs/reference/esql/functions/kibana/definition/trim.json new file mode 100644 index 0000000000000..8e194df0eb84d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/trim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "trim", + "description" : "Removes leading and trailing whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/abs.md b/docs/reference/esql/functions/kibana/docs/abs.md new file mode 100644 index 0000000000000..9dc2c5c76f4f6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/abs.md @@ -0,0 +1,11 @@ + + +### ABS +Returns the absolute value. + +``` +ROW number = -1.0 +| EVAL abs_number = ABS(number) +``` diff --git a/docs/reference/esql/functions/kibana/docs/acos.md b/docs/reference/esql/functions/kibana/docs/acos.md new file mode 100644 index 0000000000000..19ae2522d48b4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/acos.md @@ -0,0 +1,11 @@ + + +### ACOS +Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. + +``` +ROW a=.9 +| EVAL acos=ACOS(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/asin.md b/docs/reference/esql/functions/kibana/docs/asin.md new file mode 100644 index 0000000000000..c072ac19b5b92 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/asin.md @@ -0,0 +1,12 @@ + + +### ASIN +Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input +numeric expression as an angle, expressed in radians. + +``` +ROW a=.9 +| EVAL asin=ASIN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/atan.md b/docs/reference/esql/functions/kibana/docs/atan.md new file mode 100644 index 0000000000000..62686f2fbab2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/atan.md @@ -0,0 +1,12 @@ + + +### ATAN +Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input +numeric expression as an angle, expressed in radians. 
+ +``` +ROW a=12.9 +| EVAL atan=ATAN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/atan2.md b/docs/reference/esql/functions/kibana/docs/atan2.md new file mode 100644 index 0000000000000..0000c532236d9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/atan2.md @@ -0,0 +1,12 @@ + + +### ATAN2 +The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the +origin to the point (x , y) in the Cartesian plane, expressed in radians. + +``` +ROW y=12.9, x=.6 +| EVAL atan2=ATAN2(y, x) +``` diff --git a/docs/reference/esql/functions/kibana/docs/bucket.md b/docs/reference/esql/functions/kibana/docs/bucket.md new file mode 100644 index 0000000000000..6ebfe7de5527d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/bucket.md @@ -0,0 +1,8 @@ + + +### BUCKET +Creates human-friendly buckets and returns a datetime value +for each row that corresponds to the resulting bucket the row falls into. + diff --git a/docs/reference/esql/functions/kibana/docs/case.md b/docs/reference/esql/functions/kibana/docs/case.md new file mode 100644 index 0000000000000..e1494a5c2af8c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/case.md @@ -0,0 +1,8 @@ + + +### CASE +Accepts pairs of conditions and values. +The function returns the value that belongs to the first condition that evaluates to true. + diff --git a/docs/reference/esql/functions/kibana/docs/ceil.md b/docs/reference/esql/functions/kibana/docs/ceil.md new file mode 100644 index 0000000000000..812b139206c35 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ceil.md @@ -0,0 +1,12 @@ + + +### CEIL +Round a number up to the nearest integer. + +``` +ROW a=1.8 +| EVAL a=CEIL(a) +``` +Note: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. diff --git a/docs/reference/esql/functions/kibana/docs/coalesce.md b/docs/reference/esql/functions/kibana/docs/coalesce.md new file mode 100644 index 0000000000000..89cca3f3a286a --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/coalesce.md @@ -0,0 +1,11 @@ + + +### COALESCE +Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. + +``` +ROW a=null, b="b" +| EVAL COALESCE(a, b) +``` diff --git a/docs/reference/esql/functions/kibana/docs/concat.md b/docs/reference/esql/functions/kibana/docs/concat.md new file mode 100644 index 0000000000000..9c30d978370dc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/concat.md @@ -0,0 +1,7 @@ + + +### CONCAT +Concatenates two or more strings. + diff --git a/docs/reference/esql/functions/kibana/docs/cos.md b/docs/reference/esql/functions/kibana/docs/cos.md new file mode 100644 index 0000000000000..9e8abebaddb89 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/cos.md @@ -0,0 +1,11 @@ + + +### COS +Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle. + +``` +ROW a=1.8 +| EVAL cos=COS(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/cosh.md b/docs/reference/esql/functions/kibana/docs/cosh.md new file mode 100644 index 0000000000000..b8fae70ae2eed --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/cosh.md @@ -0,0 +1,11 @@ + + +### COSH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle. 
+ +``` +ROW a=1.8 +| EVAL cosh=COSH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/date_diff.md b/docs/reference/esql/functions/kibana/docs/date_diff.md new file mode 100644 index 0000000000000..8d33e21d2f92c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_diff.md @@ -0,0 +1,7 @@ + + +### DATE_DIFF +Subtracts two dates and returns their difference in multiples of the unit specified in the first argument. + diff --git a/docs/reference/esql/functions/kibana/docs/date_extract.md b/docs/reference/esql/functions/kibana/docs/date_extract.md new file mode 100644 index 0000000000000..49eb2391c188e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_extract.md @@ -0,0 +1,7 @@ + + +### DATE_EXTRACT +Extracts parts of a date, like year, month, day, hour. + diff --git a/docs/reference/esql/functions/kibana/docs/date_format.md b/docs/reference/esql/functions/kibana/docs/date_format.md new file mode 100644 index 0000000000000..fbf7fcbf0cb48 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_format.md @@ -0,0 +1,7 @@ + + +### DATE_FORMAT +Returns a string representation of a date, in the provided format. + diff --git a/docs/reference/esql/functions/kibana/docs/date_parse.md b/docs/reference/esql/functions/kibana/docs/date_parse.md new file mode 100644 index 0000000000000..8cf0769c38f3b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_parse.md @@ -0,0 +1,7 @@ + + +### DATE_PARSE +Parses a string into a date value. + diff --git a/docs/reference/esql/functions/kibana/docs/date_trunc.md b/docs/reference/esql/functions/kibana/docs/date_trunc.md new file mode 100644 index 0000000000000..6aa81ebbac3c3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_trunc.md @@ -0,0 +1,12 @@ + + +### DATE_TRUNC +Rounds down a date to the closest interval. + +``` +FROM employees +| KEEP first_name, last_name, hire_date +| EVAL year_hired = DATE_TRUNC(1 year, hire_date) +``` diff --git a/docs/reference/esql/functions/kibana/docs/e.md b/docs/reference/esql/functions/kibana/docs/e.md new file mode 100644 index 0000000000000..da85eadf2e74e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/e.md @@ -0,0 +1,7 @@ + + +### E +Euler’s number. + diff --git a/docs/reference/esql/functions/kibana/docs/ends_with.md b/docs/reference/esql/functions/kibana/docs/ends_with.md new file mode 100644 index 0000000000000..74f02c732edef --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ends_with.md @@ -0,0 +1,7 @@ + + +### ENDS_WITH +Returns a boolean that indicates whether a keyword string ends with another string. + diff --git a/docs/reference/esql/functions/kibana/docs/floor.md b/docs/reference/esql/functions/kibana/docs/floor.md new file mode 100644 index 0000000000000..a0a095525e08d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/floor.md @@ -0,0 +1,7 @@ + + +### FLOOR +Round a number down to the nearest integer. + diff --git a/docs/reference/esql/functions/kibana/docs/greatest.md b/docs/reference/esql/functions/kibana/docs/greatest.md new file mode 100644 index 0000000000000..3db0c9ed87aa5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/greatest.md @@ -0,0 +1,7 @@ + + +### GREATEST +Returns the maximum value from many columns.
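+
+For illustration, a minimal ESQL sketch of `GREATEST` (the column names and literal values are hypothetical, not taken from the generated docs):
+```
+// pick the per-row maximum across two columns
+ROW a = 10, b = 20
+| EVAL g = GREATEST(a, b)
+```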
+ diff --git a/docs/reference/esql/functions/kibana/docs/least.md b/docs/reference/esql/functions/kibana/docs/least.md new file mode 100644 index 0000000000000..ff2c19592c8e1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/least.md @@ -0,0 +1,7 @@ + + +### LEAST +Returns the minimum value from many columns. + diff --git a/docs/reference/esql/functions/kibana/docs/left.md b/docs/reference/esql/functions/kibana/docs/left.md new file mode 100644 index 0000000000000..73b79f7976512 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/left.md @@ -0,0 +1,14 @@ + + +### LEFT +Returns the substring that extracts 'length' chars from 'string' starting from the left. + +``` +FROM employees +| KEEP last_name +| EVAL left = LEFT(last_name, 3) +| SORT last_name ASC +| LIMIT 5 +``` diff --git a/docs/reference/esql/functions/kibana/docs/length.md b/docs/reference/esql/functions/kibana/docs/length.md new file mode 100644 index 0000000000000..bb1cefd390c71 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/length.md @@ -0,0 +1,7 @@ + + +### LENGTH +Returns the character length of a string. + diff --git a/docs/reference/esql/functions/kibana/docs/locate.md b/docs/reference/esql/functions/kibana/docs/locate.md new file mode 100644 index 0000000000000..0b4d4c625c17e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/locate.md @@ -0,0 +1,7 @@ + + +### LOCATE +Returns an integer that indicates the position of a keyword substring within another string + diff --git a/docs/reference/esql/functions/kibana/docs/log.md b/docs/reference/esql/functions/kibana/docs/log.md new file mode 100644 index 0000000000000..7ac136d31f720 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/log.md @@ -0,0 +1,13 @@ + + +### LOG +Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. + +Logs of zero, negative numbers, and base of one return `null` as well as a warning. + +``` +ROW base = 2.0, value = 8.0 +| EVAL s = LOG(base, value) +``` diff --git a/docs/reference/esql/functions/kibana/docs/log10.md b/docs/reference/esql/functions/kibana/docs/log10.md new file mode 100644 index 0000000000000..23ec30643e51e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/log10.md @@ -0,0 +1,13 @@ + + +### LOG10 +Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. + +Logs of 0 and negative numbers return `null` as well as a warning. + +``` +ROW d = 1000.0 +| EVAL s = LOG10(d) +``` diff --git a/docs/reference/esql/functions/kibana/docs/ltrim.md b/docs/reference/esql/functions/kibana/docs/ltrim.md new file mode 100644 index 0000000000000..33fe7b8da1b6f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ltrim.md @@ -0,0 +1,7 @@ + + +### LTRIM +Removes leading whitespaces from a string. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_avg.md b/docs/reference/esql/functions/kibana/docs/mv_avg.md new file mode 100644 index 0000000000000..73636e07fa6e4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_avg.md @@ -0,0 +1,7 @@ + + +### MV_AVG +Converts a multivalued field into a single valued field containing the average of all of the values. 
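+
+A minimal sketch of `MV_AVG` (the literal multivalued input is hypothetical):
+```
+// average the values inside a single multivalued field
+ROW a = [3, 5, 1, 6]
+| EVAL avg_a = MV_AVG(a)
+```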
+ diff --git a/docs/reference/esql/functions/kibana/docs/mv_concat.md b/docs/reference/esql/functions/kibana/docs/mv_concat.md new file mode 100644 index 0000000000000..f8092e47aaed0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_concat.md @@ -0,0 +1,7 @@ + + +### MV_CONCAT +Reduce a multivalued string field to a single valued field by concatenating all values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_count.md b/docs/reference/esql/functions/kibana/docs/mv_count.md new file mode 100644 index 0000000000000..ceea555d0d05c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_count.md @@ -0,0 +1,7 @@ + + +### MV_COUNT +Reduce a multivalued field to a single valued field containing the count of values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_dedupe.md b/docs/reference/esql/functions/kibana/docs/mv_dedupe.md new file mode 100644 index 0000000000000..6968c4dd9b3a9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_dedupe.md @@ -0,0 +1,7 @@ + + +### MV_DEDUPE +Remove duplicate values from a multivalued field. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_first.md b/docs/reference/esql/functions/kibana/docs/mv_first.md new file mode 100644 index 0000000000000..6ed8bb7570a93 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_first.md @@ -0,0 +1,7 @@ + + +### MV_FIRST +Reduce a multivalued field to a single valued field containing the first value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_last.md b/docs/reference/esql/functions/kibana/docs/mv_last.md new file mode 100644 index 0000000000000..5b68b84b4393f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_last.md @@ -0,0 +1,7 @@ + + +### MV_LAST +Reduce a multivalued field to a single valued field containing the last value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_max.md b/docs/reference/esql/functions/kibana/docs/mv_max.md new file mode 100644 index 0000000000000..acb29f7a592f6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_max.md @@ -0,0 +1,7 @@ + + +### MV_MAX +Reduce a multivalued field to a single valued field containing the maximum value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_median.md b/docs/reference/esql/functions/kibana/docs/mv_median.md new file mode 100644 index 0000000000000..81de2c3b2c689 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_median.md @@ -0,0 +1,7 @@ + + +### MV_MEDIAN +Converts a multivalued field into a single valued field containing the median value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_min.md b/docs/reference/esql/functions/kibana/docs/mv_min.md new file mode 100644 index 0000000000000..637211487a972 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_min.md @@ -0,0 +1,7 @@ + + +### MV_MIN +Reduce a multivalued field to a single valued field containing the minimum value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_slice.md b/docs/reference/esql/functions/kibana/docs/mv_slice.md new file mode 100644 index 0000000000000..7bbf36f67079d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_slice.md @@ -0,0 +1,7 @@ + + +### MV_SLICE +Returns a subset of the multivalued field using the start and end index values. 
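+
+A minimal sketch of `MV_SLICE`, using a start and an end index as the description states (input values are hypothetical):
+```
+// keep elements at positions 1 through 2 (zero-based, inclusive), yielding [2, 2]
+ROW a = [1, 2, 2, 3]
+| EVAL a1 = MV_SLICE(a, 1, 2)
+```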
+ diff --git a/docs/reference/esql/functions/kibana/docs/mv_sort.md b/docs/reference/esql/functions/kibana/docs/mv_sort.md new file mode 100644 index 0000000000000..65a74d0455f4b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_sort.md @@ -0,0 +1,7 @@ + + +### MV_SORT +Sorts a multivalued field in lexicographical order. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_sum.md b/docs/reference/esql/functions/kibana/docs/mv_sum.md new file mode 100644 index 0000000000000..a2b1bfb8ac481 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_sum.md @@ -0,0 +1,7 @@ + + +### MV_SUM +Converts a multivalued field into a single valued field containing the sum of all of the values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_zip.md b/docs/reference/esql/functions/kibana/docs/mv_zip.md new file mode 100644 index 0000000000000..b6de218ecb45b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_zip.md @@ -0,0 +1,7 @@ + + +### MV_ZIP +Combines the values from two multivalued fields with a delimiter that joins them together. + diff --git a/docs/reference/esql/functions/kibana/docs/pi.md b/docs/reference/esql/functions/kibana/docs/pi.md new file mode 100644 index 0000000000000..f796ace56607d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/pi.md @@ -0,0 +1,7 @@ + + +### PI +The ratio of a circle’s circumference to its diameter. + diff --git a/docs/reference/esql/functions/kibana/docs/pow.md b/docs/reference/esql/functions/kibana/docs/pow.md new file mode 100644 index 0000000000000..6cb9139dd91cc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/pow.md @@ -0,0 +1,7 @@ + + +### POW +Returns the value of a base raised to the power of an exponent. + diff --git a/docs/reference/esql/functions/kibana/docs/replace.md b/docs/reference/esql/functions/kibana/docs/replace.md new file mode 100644 index 0000000000000..9744a9ad7244b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/replace.md @@ -0,0 +1,7 @@ + + +### REPLACE +The function substitutes in the string any match of the regular expression with the replacement string. + diff --git a/docs/reference/esql/functions/kibana/docs/right.md b/docs/reference/esql/functions/kibana/docs/right.md new file mode 100644 index 0000000000000..6e211ae079f62 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/right.md @@ -0,0 +1,7 @@ + + +### RIGHT +Return the substring that extracts length chars from the string starting from the right. + diff --git a/docs/reference/esql/functions/kibana/docs/round.md b/docs/reference/esql/functions/kibana/docs/round.md new file mode 100644 index 0000000000000..2f8fd0864badf --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/round.md @@ -0,0 +1,7 @@ + + +### ROUND +Rounds a number to the closest number with the specified number of digits. + diff --git a/docs/reference/esql/functions/kibana/docs/rtrim.md b/docs/reference/esql/functions/kibana/docs/rtrim.md new file mode 100644 index 0000000000000..fc5636e40e804 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/rtrim.md @@ -0,0 +1,7 @@ + + +### RTRIM +Removes trailing whitespaces from a string. + diff --git a/docs/reference/esql/functions/kibana/docs/signum.md b/docs/reference/esql/functions/kibana/docs/signum.md new file mode 100644 index 0000000000000..f2e66b84c69c8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/signum.md @@ -0,0 +1,12 @@ + + +### SIGNUM +Returns the sign of the given number. 
+It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. + +``` +ROW d = 100.0 +| EVAL s = SIGNUM(d) +``` diff --git a/docs/reference/esql/functions/kibana/docs/sin.md b/docs/reference/esql/functions/kibana/docs/sin.md new file mode 100644 index 0000000000000..a87b4e4f452af --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sin.md @@ -0,0 +1,11 @@ + + +### SIN +Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle. + +``` +ROW a=1.8 +| EVAL sin=SIN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/sinh.md b/docs/reference/esql/functions/kibana/docs/sinh.md new file mode 100644 index 0000000000000..81e8d9fd473d5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sinh.md @@ -0,0 +1,11 @@ + + +### SINH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. + +``` +ROW a=1.8 +| EVAL sinh=SINH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/split.md b/docs/reference/esql/functions/kibana/docs/split.md new file mode 100644 index 0000000000000..d06d8857967f4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/split.md @@ -0,0 +1,7 @@ + + +### SPLIT +Split a single valued string into multiple strings. + diff --git a/docs/reference/esql/functions/kibana/docs/sqrt.md b/docs/reference/esql/functions/kibana/docs/sqrt.md new file mode 100644 index 0000000000000..6e52bfed4037b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sqrt.md @@ -0,0 +1,7 @@ + + +### SQRT +Returns the square root of a number. + diff --git a/docs/reference/esql/functions/kibana/docs/st_contains.md b/docs/reference/esql/functions/kibana/docs/st_contains.md new file mode 100644 index 0000000000000..6e23bb9b0f116 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_contains.md @@ -0,0 +1,12 @@ + + +### ST_CONTAINS +Returns whether the first geometry contains the second geometry. + +``` +FROM airport_city_boundaries +| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE("POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_disjoint.md b/docs/reference/esql/functions/kibana/docs/st_disjoint.md new file mode 100644 index 0000000000000..7cf66b168bd70 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_disjoint.md @@ -0,0 +1,12 @@ + + +### ST_DISJOINT +Returns whether the two geometries or geometry columns are disjoint. + +``` +FROM airport_city_boundaries +| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_intersects.md b/docs/reference/esql/functions/kibana/docs/st_intersects.md new file mode 100644 index 0000000000000..e4db33429dbe3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_intersects.md @@ -0,0 +1,11 @@ + + +### ST_INTERSECTS +Returns whether the two geometries or geometry columns intersect.
+ +``` +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_within.md b/docs/reference/esql/functions/kibana/docs/st_within.md new file mode 100644 index 0000000000000..cbb3ae5ee9aca --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_within.md @@ -0,0 +1,12 @@ + + +### ST_WITHIN +Returns whether the first geometry is within the second geometry. + +``` +FROM airport_city_boundaries +| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE("POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_x.md b/docs/reference/esql/functions/kibana/docs/st_x.md new file mode 100644 index 0000000000000..af2f4de1487cd --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_x.md @@ -0,0 +1,7 @@ + + +### ST_X +Extracts the x-coordinate from a point geometry. + diff --git a/docs/reference/esql/functions/kibana/docs/st_y.md b/docs/reference/esql/functions/kibana/docs/st_y.md new file mode 100644 index 0000000000000..575a5bd3c7d33 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_y.md @@ -0,0 +1,7 @@ + + +### ST_Y +Extracts the y-coordinate from a point geometry. + diff --git a/docs/reference/esql/functions/kibana/docs/starts_with.md b/docs/reference/esql/functions/kibana/docs/starts_with.md new file mode 100644 index 0000000000000..5af544c855051 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/starts_with.md @@ -0,0 +1,7 @@ + + +### STARTS_WITH +Returns a boolean that indicates whether a keyword string starts with another string. + diff --git a/docs/reference/esql/functions/kibana/docs/substring.md b/docs/reference/esql/functions/kibana/docs/substring.md new file mode 100644 index 0000000000000..d1d9c696f7813 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/substring.md @@ -0,0 +1,7 @@ + + +### SUBSTRING +Returns a substring of a string, specified by a start position and an optional length. + diff --git a/docs/reference/esql/functions/kibana/docs/tan.md b/docs/reference/esql/functions/kibana/docs/tan.md new file mode 100644 index 0000000000000..edfb4210f7dd2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tan.md @@ -0,0 +1,11 @@ + + +### TAN +Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle. + +``` +ROW a=1.8 +| EVAL tan=TAN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/tanh.md b/docs/reference/esql/functions/kibana/docs/tanh.md new file mode 100644 index 0000000000000..d3d8c7d4e9196 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tanh.md @@ -0,0 +1,11 @@ + + +### TANH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic tangent] of an angle. + +``` +ROW a=1.8 +| EVAL tanh=TANH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/tau.md b/docs/reference/esql/functions/kibana/docs/tau.md new file mode 100644 index 0000000000000..9a530e61dd342 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tau.md @@ -0,0 +1,7 @@ + + +### TAU +The ratio of a circle’s circumference to its radius.
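+
+A minimal sketch evaluating the constant (not part of the generated file):
+```
+// TAU() takes no arguments and returns 2 * pi
+ROW TAU()
+```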
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_boolean.md b/docs/reference/esql/functions/kibana/docs/to_boolean.md new file mode 100644 index 0000000000000..9c1bd747d168f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_boolean.md @@ -0,0 +1,7 @@ + + +### TO_BOOLEAN +Converts an input value to a boolean value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md b/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md new file mode 100644 index 0000000000000..dbaa76d1d23e0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md @@ -0,0 +1,7 @@ + + +### TO_CARTESIANPOINT +Converts an input value to a point value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md b/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md new file mode 100644 index 0000000000000..e3fd29e8f9907 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md @@ -0,0 +1,7 @@ + + +### TO_CARTESIANSHAPE +Converts an input value to a shape value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_datetime.md b/docs/reference/esql/functions/kibana/docs/to_datetime.md new file mode 100644 index 0000000000000..8326866c7166d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_datetime.md @@ -0,0 +1,7 @@ + + +### TO_DATETIME +Converts an input value to a date value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_degrees.md b/docs/reference/esql/functions/kibana/docs/to_degrees.md new file mode 100644 index 0000000000000..dc5e36a592b2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_degrees.md @@ -0,0 +1,7 @@ + + +### TO_DEGREES +Converts a number in radians to degrees. + diff --git a/docs/reference/esql/functions/kibana/docs/to_double.md b/docs/reference/esql/functions/kibana/docs/to_double.md new file mode 100644 index 0000000000000..4f531e1c8fdde --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_double.md @@ -0,0 +1,7 @@ + + +### TO_DOUBLE +Converts an input value to a double value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_geopoint.md b/docs/reference/esql/functions/kibana/docs/to_geopoint.md new file mode 100644 index 0000000000000..7f9b8ca59bc8f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_geopoint.md @@ -0,0 +1,7 @@ + + +### TO_GEOPOINT +Converts an input value to a geo_point value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_geoshape.md b/docs/reference/esql/functions/kibana/docs/to_geoshape.md new file mode 100644 index 0000000000000..cdfbdc5b6ffd9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_geoshape.md @@ -0,0 +1,7 @@ + + +### TO_GEOSHAPE +Converts an input value to a geo_shape value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_integer.md b/docs/reference/esql/functions/kibana/docs/to_integer.md new file mode 100644 index 0000000000000..ad04ecbd1e304 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_integer.md @@ -0,0 +1,7 @@ + + +### TO_INTEGER +Converts an input value to an integer value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_ip.md b/docs/reference/esql/functions/kibana/docs/to_ip.md new file mode 100644 index 0000000000000..47d06e9ab755e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_ip.md @@ -0,0 +1,7 @@ + + +### TO_IP +Converts an input string to an IP value. 
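+
+A minimal sketch of `TO_IP` (the IPv4 string literal is hypothetical):
+```
+// parse a string literal into an IP value
+ROW ip_str = "1.1.1.1"
+| EVAL ip = TO_IP(ip_str)
+```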
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_long.md b/docs/reference/esql/functions/kibana/docs/to_long.md new file mode 100644 index 0000000000000..c19273376bd4b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_long.md @@ -0,0 +1,7 @@ + + +### TO_LONG +Converts an input value to a long value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_lower.md b/docs/reference/esql/functions/kibana/docs/to_lower.md new file mode 100644 index 0000000000000..f63926ba13825 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_lower.md @@ -0,0 +1,7 @@ + + +### TO_LOWER +Returns a new string representing the input string converted to lower case. + diff --git a/docs/reference/esql/functions/kibana/docs/to_radians.md b/docs/reference/esql/functions/kibana/docs/to_radians.md new file mode 100644 index 0000000000000..071d9ff05e0b6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_radians.md @@ -0,0 +1,7 @@ + + +### TO_RADIANS +Converts a number in degrees to radians. + diff --git a/docs/reference/esql/functions/kibana/docs/to_string.md b/docs/reference/esql/functions/kibana/docs/to_string.md new file mode 100644 index 0000000000000..a066f488363aa --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_string.md @@ -0,0 +1,7 @@ + + +### TO_STRING +Converts a field into a string. + diff --git a/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md b/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md new file mode 100644 index 0000000000000..fbe9e22215ee8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md @@ -0,0 +1,7 @@ + + +### TO_UNSIGNED_LONG +Converts an input value to an unsigned long value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_upper.md b/docs/reference/esql/functions/kibana/docs/to_upper.md new file mode 100644 index 0000000000000..4c4f5fe02b646 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_upper.md @@ -0,0 +1,7 @@ + + +### TO_UPPER +Returns a new string representing the input string converted to upper case. + diff --git a/docs/reference/esql/functions/kibana/docs/to_version.md b/docs/reference/esql/functions/kibana/docs/to_version.md new file mode 100644 index 0000000000000..23cd9fcb152a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_version.md @@ -0,0 +1,7 @@ + + +### TO_VERSION +Converts an input string to a version value. + diff --git a/docs/reference/esql/functions/kibana/docs/trim.md b/docs/reference/esql/functions/kibana/docs/trim.md new file mode 100644 index 0000000000000..2911abbf5e1a9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/trim.md @@ -0,0 +1,7 @@ + + +### TRIM +Removes leading and trailing whitespaces from a string. + diff --git a/docs/reference/esql/functions/layout/auto_bucket.asciidoc b/docs/reference/esql/functions/layout/auto_bucket.asciidoc deleted file mode 100644 index 82e05ab5d215c..0000000000000 --- a/docs/reference/esql/functions/layout/auto_bucket.asciidoc +++ /dev/null @@ -1,14 +0,0 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
- -[discrete] -[[esql-auto_bucket]] -=== `AUTO_BUCKET` - -*Syntax* - -[.text-center] -image::esql/functions/signature/auto_bucket.svg[Embedded,opts=inline] - -include::../parameters/auto_bucket.asciidoc[] -include::../description/auto_bucket.asciidoc[] -include::../types/auto_bucket.asciidoc[] diff --git a/docs/reference/esql/functions/layout/bucket.asciidoc b/docs/reference/esql/functions/layout/bucket.asciidoc new file mode 100644 index 0000000000000..0445007237c8c --- /dev/null +++ b/docs/reference/esql/functions/layout/bucket.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-bucket]] +=== `BUCKET` + +*Syntax* + +[.text-center] +image::esql/functions/signature/bucket.svg[Embedded,opts=inline] + +include::../parameters/bucket.asciidoc[] +include::../description/bucket.asciidoc[] +include::../types/bucket.asciidoc[] diff --git a/docs/reference/esql/functions/layout/date_trunc.asciidoc b/docs/reference/esql/functions/layout/date_trunc.asciidoc new file mode 100644 index 0000000000000..0bd9ce4b4dbe4 --- /dev/null +++ b/docs/reference/esql/functions/layout/date_trunc.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-date_trunc]] +=== `DATE_TRUNC` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_trunc.svg[Embedded,opts=inline] + +include::../parameters/date_trunc.asciidoc[] +include::../description/date_trunc.asciidoc[] +include::../types/date_trunc.asciidoc[] +include::../examples/date_trunc.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc b/docs/reference/esql/functions/parameters/bucket.asciidoc similarity index 100% rename from docs/reference/esql/functions/parameters/auto_bucket.asciidoc rename to docs/reference/esql/functions/parameters/bucket.asciidoc diff --git a/docs/reference/esql/functions/parameters/date_trunc.asciidoc b/docs/reference/esql/functions/parameters/date_trunc.asciidoc new file mode 100644 index 0000000000000..19f7cb6cd7c74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_trunc.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`interval`:: +Interval; expressed using the timespan literal syntax. 
+ +`date`:: +Date expression diff --git a/docs/reference/esql/functions/signature/auto_bucket.svg b/docs/reference/esql/functions/signature/auto_bucket.svg deleted file mode 100644 index 7da9a053825f1..0000000000000 --- a/docs/reference/esql/functions/signature/auto_bucket.svg +++ /dev/null @@ -1 +0,0 @@ -AUTO_BUCKET(field,buckets,from,to) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/bucket.svg b/docs/reference/esql/functions/signature/bucket.svg new file mode 100644 index 0000000000000..f6662910c010d --- /dev/null +++ b/docs/reference/esql/functions/signature/bucket.svg @@ -0,0 +1 @@ +BUCKET(field,buckets,from,to) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/case.svg b/docs/reference/esql/functions/signature/case.svg new file mode 100644 index 0000000000000..d6fd7da38aca6 --- /dev/null +++ b/docs/reference/esql/functions/signature/case.svg @@ -0,0 +1 @@ +CASE(condition,trueValue) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/date_trunc.svg b/docs/reference/esql/functions/signature/date_trunc.svg new file mode 100644 index 0000000000000..c82cd04ed5c88 --- /dev/null +++ b/docs/reference/esql/functions/signature/date_trunc.svg @@ -0,0 +1 @@ +DATE_TRUNC(interval,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index b568ae1061bb5..273c508fc6f63 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -11,6 +11,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -25,6 +26,7 @@ include::concat.asciidoc[] include::layout/left.asciidoc[] include::length.asciidoc[] +include::layout/locate.asciidoc[] include::ltrim.asciidoc[] include::replace.asciidoc[] include::right.asciidoc[] diff --git a/docs/reference/esql/functions/types/bucket.asciidoc b/docs/reference/esql/functions/types/bucket.asciidoc new file mode 100644 index 0000000000000..cfe74ae25c3d0 --- /dev/null +++ b/docs/reference/esql/functions/types/bucket.asciidoc @@ -0,0 +1,44 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | buckets | from | to | result +datetime | integer | datetime | datetime | datetime +datetime | integer | datetime | keyword | datetime +datetime | integer | datetime | text | datetime +datetime | integer | keyword | datetime | datetime +datetime | integer | keyword | keyword | datetime +datetime | integer | keyword | text | datetime +datetime | integer | text | datetime | datetime +datetime | integer | text | keyword | datetime +datetime | integer | text | text | datetime +double | integer | double | double | double +double | integer | double | integer | double +double | integer | double | long | double +double | integer | integer | double | double +double | integer | integer | integer | double +double | integer | integer | long | double +double | integer | long | double | double +double | integer | long | integer | double +double | integer | long | long | double +integer | integer | double | double | double +integer | integer | double | integer | double +integer | integer | double | long | double +integer | integer | integer | double | double +integer | integer | integer | integer | double +integer | integer | integer | long | double +integer | integer | long | double | double +integer | integer | long | integer | double +integer | integer | long | long | double +long | integer | double | double | double +long | integer | double | integer | double +long | integer | double | long | double +long | integer | integer | double | double +long | integer | integer | integer | double +long | integer | integer | long | double +long | integer | long | double | double +long | integer | long | integer | double +long | integer | long | long | double +|=== diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/date_trunc.asciidoc similarity index 67% rename from docs/reference/esql/functions/types/auto_bucket.asciidoc rename to docs/reference/esql/functions/types/date_trunc.asciidoc index 535e2df29c353..8df45cfef54a8 100644 --- a/docs/reference/esql/functions/types/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/types/date_trunc.asciidoc @@ -4,6 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -field | buckets | from | to | result - +interval | date | result +date_period | datetime | datetime +time_duration | datetime | datetime |=== diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 76698501fd416..34248cc5f98d3 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -145,10 +145,16 @@ the alias points to one data stream. + Only the `add` action supports this parameter. +// tag::alias-options[] `must_exist`:: (Optional, Boolean) -If `true`, the alias must exist to perform the action. Defaults to `false`. Only -the `remove` action supports this parameter. +Affects the behavior when attempting to remove an alias which does not exist. +If `true`, removing an alias which does not exist will cause all actions to fail. +If `false`, removing an alias which does not exist will only cause that removal to fail. +Defaults to `false`. +// end::alias-options[] ++ +Only the `remove` action supports this parameter. // tag::alias-options[] `routing`:: @@ -168,3 +174,51 @@ stream aliases don't support this parameter. Only the `add` action supports this parameter. 
===== ==== + + + +[role="child_attributes"] +[[indices-aliases-api-response-body]] +==== {api-response-body-title} + +`acknowledged`:: +(Boolean) +If `true`, the request received a response from the master node within the +`timeout` period. + +`errors`:: +(Boolean) +If `true`, at least one of the requested actions failed. + +`action_results`:: +(Optional, array of objects) Results for each requested action. ++ +.Properties of `action_results` objects +[%collapsible%open] +==== + +`action`:: +(object) +Description of the associated action request. ++ +.Properties of `action` object +[%collapsible%open] +===== +`type`:: +(string) The type of the associated action, one of `add`, `remove`, or `remove_index`. + +`indices`:: +(array of strings) List of indices in the associated action. + +`aliases`:: +(array of strings) List of aliases in the associated action. +===== + +`status`:: +(integer) HTTP status code returned for the action. + +`error`:: +(Optional, object) Contains additional information about the failed action. ++ +Only present if the action failed. +==== diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 6df1993175a0d..7d0ede82f70fa 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -165,7 +165,7 @@ want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. `model_id`::: -(Optional, string) +(Required, string) The name of the model to use for the {infer} task. Refer to the https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] for the list of available text embedding models. @@ -431,4 +431,3 @@ PUT _inference/completion/openai_completion } ------------------------------------------------------------ // TEST[skip:TBD] - diff --git a/docs/reference/ingest/apis/geoip-stats-api.asciidoc b/docs/reference/ingest/apis/geoip-stats-api.asciidoc index 6ef0db546342b..84a2b00737e5a 100644 --- a/docs/reference/ingest/apis/geoip-stats-api.asciidoc +++ b/docs/reference/ingest/apis/geoip-stats-api.asciidoc @@ -4,8 +4,8 @@ GeoIP stats ++++ -Gets download statistics for GeoIP2 databases used with the -<>. +Gets statistics about the <>, including +download statistics for GeoIP2 databases used with it. [source,console] ---- @@ -60,7 +60,7 @@ Total number of database updates skipped. `nodes`:: (object) -Downloaded GeoIP2 databases for each node. +Statistics for each node. + .Properties of `nodes` [%collapsible%open] @@ -90,4 +90,4 @@ Downloaded database files, including related license files. {es} stores these files in the node's <>: `$ES_TMPDIR/geoip-databases/`. ===== -==== \ No newline at end of file +==== diff --git a/docs/reference/mapping/types/sparse-vector.asciidoc b/docs/reference/mapping/types/sparse-vector.asciidoc index 17a193eef1d4d..6c7ad6550753e 100644 --- a/docs/reference/mapping/types/sparse-vector.asciidoc +++ b/docs/reference/mapping/types/sparse-vector.asciidoc @@ -26,6 +26,8 @@ PUT my-index See <> for a complete example on adding documents to a `sparse_vector` mapped field using ELSER. +NOTE: `sparse_vector` fields cannot be included in indices that were *created* on {es} versions between 8.0 and 8.10. + NOTE: `sparse_vector` fields only support single-valued fields and strictly positive values. Multi-valued fields and negative values will be rejected. 
diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index f9da92aef925e..05c97d51a38e7 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -7,6 +7,7 @@ This section summarizes the changes in each release. * <> +* <> * <> * <> * <> @@ -64,6 +65,7 @@ This section summarizes the changes in each release. -- include::release-notes/8.14.0.asciidoc[] +include::release-notes/8.13.2.asciidoc[] include::release-notes/8.13.1.asciidoc[] include::release-notes/8.13.0.asciidoc[] include::release-notes/8.12.2.asciidoc[] diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index 99ee4e5fb86e1..bcb533049f27d 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -389,7 +389,7 @@ Security:: Snapshot/Restore:: * Add s3 `HeadObject` request to request stats {es-pull}105105[#105105] -* Expose `OperationPurpose` via `CustomQueryParameter` to s3 logs {es-pull}105044[#105044] +* Expose `OperationPurpose` in S3 access logs using a https://docs.aws.amazon.com/AmazonS3/latest/userguide/LogFormat.html#LogFormatCustom[custom query-string parameter] {es-pull}105044[#105044] * Fix blob cache race, decay, time dependency {es-pull}104784[#104784] * Pause shard snapshots on graceful shutdown {es-pull}101717[#101717] * Retry indefinitely for s3 indices blob read errors {es-pull}103300[#103300] diff --git a/docs/reference/release-notes/8.13.2.asciidoc b/docs/reference/release-notes/8.13.2.asciidoc new file mode 100644 index 0000000000000..1da23b5125833 --- /dev/null +++ b/docs/reference/release-notes/8.13.2.asciidoc @@ -0,0 +1,31 @@ +[[release-notes-8.13.2]] +== {es} version 8.13.2 + +Also see <>. + +[[bug-8.13.2]] +[float] +=== Bug fixes + +Aggregations:: +* Address concurrency issue in top hits aggregation {es-pull}106990[#106990] + +Application:: +* [Connector API] Support numeric for configuration select option value type {es-pull}107059[#107059] + +Downsampling:: +* Fix a downsample persistent task assignment bug {es-pull}106247[#106247] +* Fix downsample action request serialization {es-pull}106920[#106920] + +ES|QL:: +* ESQL: Fix fully pruned aggregates {es-pull}106673[#106673] (issue: {es-issue}106427[#106427]) + +[[enhancement-8.13.2]] +[float] +=== Enhancements + +Security:: +* Query API Key Information API support for the `typed_keys` request parameter {es-pull}106873[#106873] (issue: {es-issue}106817[#106817]) +* Query API Keys support for both `aggs` and `aggregations` keywords {es-pull}107054[#107054] (issue: {es-issue}106839[#106839]) + + diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 0c79793ee6c5a..11324639cb2f0 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -541,13 +541,17 @@ MinIO-backed repositories as well as repositories stored on AWS S3. Other S3-compatible storage systems may also work with {es}, but these are not covered by the {es} test suite. -Note that some storage systems claim to be S3-compatible but do not faithfully -emulate S3's behaviour in full. The `repository-s3` type requires full -compatibility with S3. In particular it must support the same set of API -endpoints, return the same errors in case of failures, and offer consistency and -performance at least as good as S3 even when accessed concurrently by multiple -nodes. 
You will need to work with the supplier of your storage system to address -any incompatibilities you encounter. Please do not report {es} issues involving +There are many systems, including some from very well-known storage vendors, +which claim to offer an S3-compatible API despite failing to emulate S3's +behaviour in full. If you are using such a system for your snapshots, consider +using a <> based +on a standardized protocol such as NFS to access your storage system instead. +The `repository-s3` type requires full compatibility with S3. In particular it +must support the same set of API endpoints, with the same parameters, return +the same errors in case of failures, and offer consistency and performance at +least as good as S3 even when accessed concurrently by multiple nodes. You will +need to work with the supplier of your storage system to address any +incompatibilities you encounter. Please do not report {es} issues involving storage systems which claim to be S3-compatible unless you can demonstrate that the same issue exists when using a genuine AWS S3 repository. diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index ab531b69be947..68e4dcf0d2d99 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -7,13 +7,12 @@ */ package org.elasticsearch.aggregations.bucket.histogram; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.TransportVersions; import org.elasticsearch.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.LongObjectPagedHashMap; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -22,6 +21,7 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -35,6 +35,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.ListIterator; import java.util.Map; @@ -232,6 +233,11 @@ public InternalAutoDateHistogram(StreamInput in) throws IOException { } else { bucketInnerInterval = 1; // Calculated on merge. 
} + // we changed the order format in 8.13 for partial reduce, therefore we need to sort them before performing the merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingLong(b -> b.key)); + } } @@ -287,6 +293,61 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); } + /** + * This method works almost exactly the same as + * InternalDateHistogram#reduceBuckets(List, ReduceContext), the difference + * here is that we need to round all the keys we see using the highest level + * rounding returned across all the shards so the resolution of the buckets + * is the same and they can be reduced together. + */ + private BucketReduceResult reduceBuckets( + PriorityQueue> pq, + int reduceRoundingIdx, + long min, + long max, + AggregationReduceContext reduceContext + ) { + // First we need to find the highest level rounding used across all the + // shards + Rounding.Prepared reduceRounding = prepare(reduceRoundingIdx, min, max); + + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + long key = reduceRounding.round(pq.top().current().key); + + do { + final IteratorAndCurrent top = pq.top(); + + if (reduceRounding.round(top.current().key) != key) { + // the key changes, reduce what we already buffered and reset the buffer for current buckets + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + reducedBuckets.add(reduced); + currentBuckets.clear(); + key = reduceRounding.round(top.current().key); + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert top.current().key > key : "shards must return data sorted by key"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + reducedBuckets.add(reduced); + } + } + + return mergeBucketsIfNeeded(new BucketReduceResult(reducedBuckets, reduceRoundingIdx, 1, reduceRounding, min, max), reduceContext); + } + private BucketReduceResult mergeBucketsIfNeeded(BucketReduceResult firstPassResult, AggregationReduceContext reduceContext) { int idx = firstPassResult.roundingIdx; RoundingInfo info = bucketInfo.roundingInfos[idx]; @@ -338,13 +399,12 @@ private List mergeBuckets( private Bucket reduceBucket(List buckets, AggregationReduceContext context) { assert buckets.isEmpty() == false; - long docCount = 0; - for (Bucket bucket : buckets) { - docCount += bucket.docCount; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto().key, reducer.getDocCount(), reducer.getAggregations()); } - final List aggregations = new BucketAggregationList<>(buckets); - final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); - return new InternalAutoDateHistogram.Bucket(buckets.get(0).key, docCount, format, aggs); } private record BucketReduceResult( @@ -434,87 +494,33 @@ static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, Rou return currentRoundingIdx - 1; } - /** - * This method works almost 
exactly the same as - * InternalDateHistogram#reduceBuckets(List, ReduceContext), the different - * here is that we need to round all the keys we see using the highest level - * rounding returned across all the shards so the resolution of the buckets - * is the same and they can be reduced together. - */ @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - private final LongObjectPagedHashMap> bucketsReducer = new LongObjectPagedHashMap<>( - getBuckets().size(), - reduceContext.bigArrays() - ); - int reduceRoundingIdx = 0; - long min = Long.MAX_VALUE; - long max = Long.MIN_VALUE; + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current().key < b.current().key; + } + }; + private int reduceRoundingIdx = 0; + private long min = Long.MAX_VALUE; + private long max = Long.MIN_VALUE; @Override public void accept(InternalAggregation aggregation) { - final InternalAutoDateHistogram histogram = (InternalAutoDateHistogram) aggregation; + InternalAutoDateHistogram histogram = (InternalAutoDateHistogram) aggregation; reduceRoundingIdx = Math.max(histogram.bucketInfo.roundingIdx, reduceRoundingIdx); - if (false == histogram.buckets.isEmpty()) { + if (histogram.buckets.isEmpty() == false) { min = Math.min(min, histogram.buckets.get(0).key); max = Math.max(max, histogram.buckets.get(histogram.buckets.size() - 1).key); - for (Bucket bucket : histogram.buckets) { - BucketReducer reducer = bucketsReducer.get(bucket.key); - if (reducer == null) { - reducer = new BucketReducer<>(bucket, reduceContext, size); - bucketsReducer.put(bucket.key, reducer); - } - reducer.accept(bucket); - } + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - // First we need to find the highest level rounding used across all the - // shards - final Rounding.Prepared reduceRounding = prepare(reduceRoundingIdx, min, max); - - final long[] keys = new long[(int) bucketsReducer.size()]; - { - // fill the array and sort it - final int[] index = new int[] { 0 }; - bucketsReducer.forEach(c -> keys[index[0]++] = c.key); - Arrays.sort(keys); - } - - final List reducedBuckets = new ArrayList<>(); - if (keys.length > 0) { - // list of buckets coming from different shards that have the same key - BucketReducer currentReducer = null; - long key = reduceRounding.round(keys[0]); - for (long top : keys) { - if (reduceRounding.round(top) != key) { - assert currentReducer != null; - // the key changes, reduce what we already buffered and reset the buffer for current buckets - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.getAggregations())); - currentReducer = null; - key = reduceRounding.round(top); - } - - final BucketReducer nextReducer = bucketsReducer.get(top); - if (currentReducer == null) { - currentReducer = nextReducer; - } else { - currentReducer.accept(createBucket(key, nextReducer.getDocCount(), nextReducer.getAggregations())); - } - } - - if (currentReducer != null) { - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.getAggregations())); - } - } - - BucketReduceResult reducedBucketsResult = mergeBucketsIfNeeded( - new BucketReduceResult(reducedBuckets, reduceRoundingIdx, 1, reduceRounding, min, max), - reduceContext - ); + BucketReduceResult reducedBucketsResult = reduceBuckets(pq, reduceRoundingIdx, min, 
max, reduceContext); if (reduceContext.isFinalReduce()) { // adding empty buckets if needed @@ -543,12 +549,6 @@ public InternalAggregation get() { reducedBucketsResult.innerInterval ); } - - @Override - public void close() { - bucketsReducer.forEach(c -> Releasables.close(c.value)); - Releasables.close(bucketsReducer); - } }; } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml index 70c563d1d4510..1050d6e01a95f 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml @@ -307,3 +307,86 @@ indices.get_alias: name: this-does-not-exist* - is_false: ds-first.aliases.my-alias +--- +"Action Results with multiple matching aliases": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + features: allowed_warnings + - do: + allowed_warnings: + - "index template [my-template] has index patterns [log-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [ log-* ] + template: + settings: + index.number_of_replicas: 0 + data_stream: { } + - do: + indices.create_data_stream: + name: log-foobar + - is_true: acknowledged + - do: + indices.update_aliases: + body: + actions: + - add: + index: log-foobar + aliases: test_alias1 + - remove: + index: log-foobar + aliases: test_non_existing + must_exist: false + - is_true: errors + - length: { action_results: 2 } + - match: { action_results.0.status: 200 } + - match: { action_results.0.action: { 'type': 'add', 'indices': ['log-foobar'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404 } + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['log-foobar'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } +--- +"Single action result per action": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + features: allowed_warnings + - do: + allowed_warnings: + - "index template [my-template] has index patterns [log-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [ log-* ] + template: + settings: + index.number_of_replicas: 0 + data_stream: { } + - do: + indices.create_data_stream: + name: log-test-1 + - do: + indices.create_data_stream: + name: log-test-2 + - is_true: acknowledged + - do: + indices.update_aliases: + body: + actions: + - add: + index: log-test-* + aliases: test_alias1 + - remove: + index: log-test-* + aliases: test_non_existing + must_exist: false + - is_true: errors + - length: { action_results: 2 } + - match: { action_results.0.status: 200} + - match: { action_results.0.action: { 'type': 'add', 'indices': ['log-test-1', 'log-test-2'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404 } + - match: { action_results.1.action: { 'type': 'remove', 
'indices': ['log-test-1', 'log-test-2'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java index d1cdc719b02f1..49ab73e8d2375 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java @@ -92,7 +92,7 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return new TestDocumentSizeReporter(); } }; diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 54d465aecda52..9dcd8abc7bc57 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -30,7 +30,7 @@ import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.plugins.IngestPlugin; @@ -121,13 +121,10 @@ public void cleanUp() throws Exception { } }); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response response = client().execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request() - ).actionGet(); - assertThat(response.getStats().getDatabasesCount(), equalTo(0)); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet(); + assertThat(response.getDownloaderStats().getDatabasesCount(), equalTo(0)); assertThat(response.getNodes(), not(empty())); - for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { + for (GeoIpStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat(nodeResponse.getConfigDatabases(), empty()); assertThat(nodeResponse.getDatabases(), empty()); assertThat(nodeResponse.getFilesInTemp().stream().filter(s -> s.endsWith(".txt") == false).toList(), empty()); @@ -703,12 +700,9 @@ private void setupDatabasesInConfigDirectory() throws Exception { }); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response response = client().execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request() - ).actionGet(); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet(); assertThat(response.getNodes(), not(empty())); - for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { + for (GeoIpStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat( 
nodeResponse.getConfigDatabases(), containsInAnyOrder("GeoLite2-Country.mmdb", "GeoLite2-City.mmdb", "GeoLite2-ASN.mmdb") @@ -751,12 +745,9 @@ private void deleteDatabasesInConfigDirectory() throws Exception { }); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response response = client().execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request() - ).actionGet(); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet(); assertThat(response.getNodes(), not(empty())); - for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { + for (GeoIpStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat(nodeResponse.getConfigDatabases(), empty()); } }); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java index 77b0faeeb6ebd..ec54317e144d1 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESIntegTestCase; @@ -65,8 +65,8 @@ public void testStats() throws Exception { * slowly to pass. 
*/ assumeTrue("only test with fixture to have stable results", getEndpoint() != null); - GeoIpDownloaderStatsAction.Request req = new GeoIpDownloaderStatsAction.Request(); - GeoIpDownloaderStatsAction.Response response = client().execute(GeoIpDownloaderStatsAction.INSTANCE, req).actionGet(); + GeoIpStatsAction.Request req = new GeoIpStatsAction.Request(); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, req).actionGet(); XContentTestUtils.JsonMapView jsonMapView = new XContentTestUtils.JsonMapView(convertToMap(response)); assertThat(jsonMapView.get("stats.successful_downloads"), equalTo(0)); assertThat(jsonMapView.get("stats.failed_downloads"), equalTo(0)); @@ -78,7 +78,7 @@ public void testStats() throws Exception { updateClusterSettings(Settings.builder().put(GeoIpDownloaderTaskExecutor.ENABLED_SETTING.getKey(), true)); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response res = client().execute(GeoIpDownloaderStatsAction.INSTANCE, req).actionGet(); + GeoIpStatsAction.Response res = client().execute(GeoIpStatsAction.INSTANCE, req).actionGet(); XContentTestUtils.JsonMapView view = new XContentTestUtils.JsonMapView(convertToMap(res)); assertThat(view.get("stats.successful_downloads"), equalTo(4)); assertThat(view.get("stats.failed_downloads"), equalTo(0)); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 2e0a84cfde23b..e5756652a9842 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -28,9 +28,9 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStats; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsTransportAction; -import org.elasticsearch.ingest.geoip.stats.RestGeoIpDownloaderStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsTransportAction; +import org.elasticsearch.ingest.geoip.stats.RestGeoIpStatsAction; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksExecutor; @@ -144,7 +144,7 @@ public List> getPersistentTasksExecutor( @Override public List> getActions() { - return List.of(new ActionHandler<>(GeoIpDownloaderStatsAction.INSTANCE, GeoIpDownloaderStatsTransportAction.class)); + return List.of(new ActionHandler<>(GeoIpStatsAction.INSTANCE, GeoIpStatsTransportAction.class)); } @Override @@ -159,7 +159,7 @@ public List getRestHandlers( Supplier nodesInCluster, Predicate clusterSupportsFeature ) { - return List.of(new RestGeoIpDownloaderStatsAction()); + return List.of(new RestGeoIpStatsAction()); } @Override diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java similarity index 88% rename from modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java rename to modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java index 
f9b1d8c637f68..db1242888ca82 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java @@ -30,11 +30,11 @@ import java.util.Objects; import java.util.Set; -public class GeoIpDownloaderStatsAction { +public class GeoIpStatsAction { public static final ActionType INSTANCE = new ActionType<>("cluster:monitor/ingest/geoip/stats"); - private GeoIpDownloaderStatsAction() {/* no instances */} + private GeoIpStatsAction() {/* no instances */} public static class Request extends BaseNodesRequest implements ToXContentObject { @@ -89,8 +89,8 @@ public Response(ClusterName clusterName, List nodes, List n.stats).filter(Objects::nonNull).findFirst().orElse(GeoIpDownloaderStats.EMPTY); + public GeoIpDownloaderStats getDownloaderStats() { + return getNodes().stream().map(n -> n.downloaderStats).filter(Objects::nonNull).findFirst().orElse(GeoIpDownloaderStats.EMPTY); } @Override @@ -105,7 +105,7 @@ protected void writeNodesTo(StreamOutput out, List nodes) throws I @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - GeoIpDownloaderStats stats = getStats(); + GeoIpDownloaderStats stats = getDownloaderStats(); builder.startObject(); builder.field("stats", stats); builder.startObject("nodes"); @@ -153,14 +153,14 @@ public int hashCode() { public static class NodeResponse extends BaseNodeResponse { - private final GeoIpDownloaderStats stats; + private final GeoIpDownloaderStats downloaderStats; private final Set databases; private final Set filesInTemp; private final Set configDatabases; protected NodeResponse(StreamInput in) throws IOException { super(in); - stats = in.readBoolean() ? new GeoIpDownloaderStats(in) : null; + downloaderStats = in.readBoolean() ? 
new GeoIpDownloaderStats(in) : null; databases = in.readCollectionAsImmutableSet(StreamInput::readString); filesInTemp = in.readCollectionAsImmutableSet(StreamInput::readString); configDatabases = in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) @@ -170,20 +170,20 @@ protected NodeResponse(StreamInput in) throws IOException { protected NodeResponse( DiscoveryNode node, - GeoIpDownloaderStats stats, + GeoIpDownloaderStats downloaderStats, Set databases, Set filesInTemp, Set configDatabases ) { super(node); - this.stats = stats; + this.downloaderStats = downloaderStats; this.databases = Set.copyOf(databases); this.filesInTemp = Set.copyOf(filesInTemp); this.configDatabases = Set.copyOf(configDatabases); } - public GeoIpDownloaderStats getStats() { - return stats; + public GeoIpDownloaderStats getDownloaderStats() { + return downloaderStats; } public Set getDatabases() { @@ -201,9 +201,9 @@ public Set getConfigDatabases() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeBoolean(stats != null); - if (stats != null) { - stats.writeTo(out); + out.writeBoolean(downloaderStats != null); + if (downloaderStats != null) { + downloaderStats.writeTo(out); } out.writeStringCollection(databases); out.writeStringCollection(filesInTemp); @@ -217,7 +217,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; NodeResponse that = (NodeResponse) o; - return stats.equals(that.stats) + return downloaderStats.equals(that.downloaderStats) && databases.equals(that.databases) && filesInTemp.equals(that.filesInTemp) && Objects.equals(configDatabases, that.configDatabases); @@ -225,7 +225,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(stats, databases, filesInTemp, configDatabases); + return Objects.hash(downloaderStats, databases, filesInTemp, configDatabases); } } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsTransportAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java similarity index 80% rename from modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsTransportAction.java rename to modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java index 0958002405fbe..13f9544e1b9e4 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsTransportAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java @@ -18,10 +18,10 @@ import org.elasticsearch.ingest.geoip.DatabaseNodeService; import org.elasticsearch.ingest.geoip.GeoIpDownloader; import org.elasticsearch.ingest.geoip.GeoIpDownloaderTaskExecutor; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.NodeRequest; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.NodeResponse; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.Request; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.Response; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.NodeRequest; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.NodeResponse; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.Request; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.Response; import 
org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -29,14 +29,14 @@ import java.io.IOException; import java.util.List; -public class GeoIpDownloaderStatsTransportAction extends TransportNodesAction { +public class GeoIpStatsTransportAction extends TransportNodesAction { private final TransportService transportService; private final DatabaseNodeService registry; private final GeoIpDownloaderTaskExecutor geoIpDownloaderTaskExecutor; @Inject - public GeoIpDownloaderStatsTransportAction( + public GeoIpStatsTransportAction( TransportService transportService, ClusterService clusterService, ThreadPool threadPool, @@ -45,7 +45,7 @@ public GeoIpDownloaderStatsTransportAction( GeoIpDownloaderTaskExecutor geoIpDownloaderTaskExecutor ) { super( - GeoIpDownloaderStatsAction.INSTANCE.name(), + GeoIpStatsAction.INSTANCE.name(), clusterService, transportService, actionFilters, @@ -75,10 +75,10 @@ protected NodeResponse newNodeResponse(StreamInput in, DiscoveryNode node) throw @Override protected NodeResponse nodeOperation(NodeRequest request, Task task) { GeoIpDownloader geoIpTask = geoIpDownloaderTaskExecutor.getCurrentTask(); - GeoIpDownloaderStats stats = geoIpTask == null || geoIpTask.getStatus() == null ? null : geoIpTask.getStatus(); + GeoIpDownloaderStats downloaderStats = geoIpTask == null || geoIpTask.getStatus() == null ? null : geoIpTask.getStatus(); return new NodeResponse( transportService.getLocalNode(), - stats, + downloaderStats, registry.getAvailableDatabases(), registry.getFilesInTemp(), registry.getConfigDatabases() diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpDownloaderStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java similarity index 80% rename from modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpDownloaderStatsAction.java rename to modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java index 49f3ee81c7f62..ac6022205d04e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpDownloaderStatsAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java @@ -20,7 +20,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; @ServerlessScope(Scope.INTERNAL) -public class RestGeoIpDownloaderStatsAction extends BaseRestHandler { +public class RestGeoIpStatsAction extends BaseRestHandler { @Override public String getName() { @@ -34,10 +34,6 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - return channel -> client.execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request(), - new RestToXContentListener<>(channel) - ); + return channel -> client.execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request(), new RestToXContentListener<>(channel)); } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java similarity index 68% rename from modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseSerializingTests.java rename to 
modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java index 54193967ba853..1008dcf56c4f1 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseSerializingTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java @@ -15,30 +15,29 @@ import java.util.Set; -public class GeoIpDownloaderStatsActionNodeResponseSerializingTests extends AbstractWireSerializingTestCase< - GeoIpDownloaderStatsAction.NodeResponse> { +public class GeoIpStatsActionNodeResponseSerializingTests extends AbstractWireSerializingTestCase { @Override - protected Writeable.Reader instanceReader() { - return GeoIpDownloaderStatsAction.NodeResponse::new; + protected Writeable.Reader instanceReader() { + return GeoIpStatsAction.NodeResponse::new; } @Override - protected GeoIpDownloaderStatsAction.NodeResponse createTestInstance() { + protected GeoIpStatsAction.NodeResponse createTestInstance() { return createRandomInstance(); } @Override - protected GeoIpDownloaderStatsAction.NodeResponse mutateInstance(GeoIpDownloaderStatsAction.NodeResponse instance) { + protected GeoIpStatsAction.NodeResponse mutateInstance(GeoIpStatsAction.NodeResponse instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } - static GeoIpDownloaderStatsAction.NodeResponse createRandomInstance() { + static GeoIpStatsAction.NodeResponse createRandomInstance() { DiscoveryNode node = DiscoveryNodeUtils.create("id"); Set databases = Set.copyOf(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set files = Set.copyOf(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set configDatabases = Set.copyOf(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); - return new GeoIpDownloaderStatsAction.NodeResponse( + return new GeoIpStatsAction.NodeResponse( node, GeoIpDownloaderStatsSerializingTests.createRandomInstance(), databases, diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java similarity index 91% rename from modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseTests.java rename to modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java index a0fd470ef0468..27a332c3b42f9 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java @@ -18,14 +18,14 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public class GeoIpDownloaderStatsActionNodeResponseTests extends ESTestCase { +public class GeoIpStatsActionNodeResponseTests extends ESTestCase { public void testInputsAreDefensivelyCopied() { DiscoveryNode node = DiscoveryNodeUtils.create("id"); Set databases = new HashSet<>(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set files = new HashSet<>(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set configDatabases = new HashSet<>(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); - GeoIpDownloaderStatsAction.NodeResponse nodeResponse = new 
GeoIpDownloaderStatsAction.NodeResponse( + GeoIpStatsAction.NodeResponse nodeResponse = new GeoIpStatsAction.NodeResponse( node, GeoIpDownloaderStatsSerializingTests.createRandomInstance(), databases, diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionResponseSerializingTests.java similarity index 50% rename from modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java rename to modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionResponseSerializingTests.java index d566fa8838df1..6e057843b9776 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionResponseSerializingTests.java @@ -14,25 +14,24 @@ import java.util.List; -public class GeoIpDownloaderStatsActionResponseSerializingTests extends AbstractWireSerializingTestCase< - GeoIpDownloaderStatsAction.Response> { +public class GeoIpStatsActionResponseSerializingTests extends AbstractWireSerializingTestCase { @Override - protected Writeable.Reader instanceReader() { - return GeoIpDownloaderStatsAction.Response::new; + protected Writeable.Reader instanceReader() { + return GeoIpStatsAction.Response::new; } @Override - protected GeoIpDownloaderStatsAction.Response createTestInstance() { - List nodeResponses = randomList( + protected GeoIpStatsAction.Response createTestInstance() { + List nodeResponses = randomList( 10, - GeoIpDownloaderStatsActionNodeResponseSerializingTests::createRandomInstance + GeoIpStatsActionNodeResponseSerializingTests::createRandomInstance ); - return new GeoIpDownloaderStatsAction.Response(ClusterName.DEFAULT, nodeResponses, List.of()); + return new GeoIpStatsAction.Response(ClusterName.DEFAULT, nodeResponses, List.of()); } @Override - protected GeoIpDownloaderStatsAction.Response mutateInstance(GeoIpDownloaderStatsAction.Response instance) { + protected GeoIpStatsAction.Response mutateInstance(GeoIpStatsAction.Response instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index e2b932b01a516..09507ae926f44 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -340,7 +340,8 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext valuesSourceType, (dv, n) -> { throw new UnsupportedOperationException(); - } + }, + isIndexed() ).build(cache, breakerService); return new ScaledFloatIndexFieldData(scaledValues, scalingFactor, ScaledFloatDocValuesField::new); }; @@ -608,6 +609,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; // We don't know how to take advantage of the index with half floats anyway + } + @Override public NumericType getNumericType() { /* diff --git 
a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index cf3bc21526bf6..13e582598a2d2 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -267,7 +267,7 @@ protected S3Repository createRepository( ) { return new S3Repository(metadata, registry, getService(), clusterService, bigArrays, recoverySettings, s3RepositoriesMetrics) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 50470ec499ef6..ff61504d6c525 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -132,7 +132,7 @@ private S3Repository createS3Repo(RepositoryMetadata metadata) { S3RepositoriesMetrics.NOOP ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java index 00abf1e77fd57..a02bff59988d8 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java @@ -43,7 +43,7 @@ private URLRepository createRepository(Settings baseSettings, RepositoryMetadata mock(URLHttpClient.Factory.class) ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 08a133bcb69c8..44f52105f64c9 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -80,7 +80,7 @@ public String typeName() { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, Murmur3DocValueField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, Murmur3DocValueField::new, isIndexed()); } @Override diff --git a/qa/rolling-upgrade-legacy/build.gradle b/qa/rolling-upgrade-legacy/build.gradle index e48d05f98b20a..77dfc9724ce8f 100644 --- a/qa/rolling-upgrade-legacy/build.gradle +++ b/qa/rolling-upgrade-legacy/build.gradle @@ -7,8 +7,8 @@ */ + import 
org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.BwcVersions import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -59,11 +59,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> systemProperty 'tests.upgrade_from_version', oldVersion nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["old_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["old_cluster/30_vector_search/Create indexed byte vectors and search"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -81,11 +76,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) def excludeList = [] - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/Search byte indices created in old cluster"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -103,11 +93,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) def excludeList = [] - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/Search byte indices created in old cluster"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -124,11 +109,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) def excludeList = [] - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["upgraded_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["upgraded_cluster/30_vector_search/Search byte indices created in old cluster"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml deleted file mode 100644 index 108f58b29bf27..0000000000000 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml +++ /dev/null @@ -1,144 +0,0 @@ ---- -"Search float indices created in old cluster": - - skip: - features: close_to - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { 
hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [ 4, 5, 6 ] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - ---- -"Search byte indices created in old cluster": - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } - - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } - - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } - - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml deleted file mode 100644 index 96b950e5ae927..0000000000000 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml +++ /dev/null @@ -1,236 +0,0 @@ ---- -"Create indexed float vectors and search": - - skip: - features: close_to - - do: - indices.create: - index: test-float-index - body: - settings: - number_of_shards: "1" - mappings: - properties: - bdv: - type: dense_vector - dims: 3 - knn: - type: dense_vector - dims: 3 - index: true - similarity: l2_norm - index_options: - type: hnsw - m: 16 - ef_construction: 100 - - do: - bulk: - 
index: test-float-index - refresh: true - body: - - '{"index": {"_id": "1"}}' - - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' - - '{"index": {"_id": "2"}}' - - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' - - '{"index": {"_id": "3"}}' - - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' - - '{"index": {"_id": "4"}}' - - '{"knn": [1, 2, 1]}' - - '{"index": {"_id": "5"}}' - - '{"knn": [1, 3, 1]}' - - '{"index": {"_id": "6"}}' - - '{"knn": [2, 1, 1]}' - - '{"index": {"_id": "7"}}' - - '{"knn": [3, 1, 1]}' - - '{"index": {"_id": "missing_vector"}}' - - '{}' - - do: - indices.forcemerge: - index: test-float-index - max_num_segments: 1 - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [ 4, 5, 6 ] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - ---- -"Create indexed byte vectors and search": - - skip: - features: close_to - - do: - indices.create: - index: test-byte-index - body: - settings: - number_of_shards: "1" - mappings: - properties: - bdv: - type: dense_vector - element_type: byte - dims: 3 - knn: - type: dense_vector - element_type: byte - dims: 3 - index: true - similarity: l2_norm - - do: - bulk: - index: test-byte-index - refresh: true - body: - - '{"index": {"_id": "1"}}' - - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' - - '{"index": {"_id": "2"}}' - - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' - - '{"index": {"_id": "3"}}' - - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' - - '{"index": {"_id": "4"}}' - - '{"knn": [1, 2, 1]}' - - '{"index": {"_id": "5"}}' - - '{"knn": [1, 3, 1]}' - - '{"index": {"_id": "6"}}' - - '{"knn": [2, 1, 1]}' - - '{"index": {"_id": "7"}}' - - '{"knn": [3, 1, 1]}' - - '{"index": {"_id": "missing_vector"}}' - - '{}' - - do: - indices.forcemerge: - index: test-byte-index - max_num_segments: 1 - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - 
match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml deleted file mode 100644 index ee2c357594b94..0000000000000 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml +++ /dev/null @@ -1,148 +0,0 @@ ---- -"Search float indices created in old cluster": - - skip: - features: close_to - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [ 4, 5, 6 ] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - ---- -"Search byte indices created in old cluster": - - skip: - features: close_to - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - 
source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java new file mode 100644 index 0000000000000..d77910f443d58 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -0,0 +1,389 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; + +public class VectorSearchIT extends ParameterizedRollingUpgradeTestCase { + public VectorSearchIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + private static final String FLOAT_INDEX_NAME = "float_vector_index"; + private static final String SCRIPT_VECTOR_INDEX_NAME = "script_vector_index"; + private static final String SCRIPT_BYTE_INDEX_NAME = "script_byte_vector_index"; + private static final String BYTE_INDEX_NAME = "byte_vector_index"; + private static final String QUANTIZED_INDEX_NAME = "quantized_vector_index"; + private static final String FLOAT_VECTOR_SEARCH_VERSION = "8.4.0"; + private static final String BYTE_VECTOR_SEARCH_VERSION = "8.6.0"; + private static final String QUANTIZED_VECTOR_SEARCH_VERSION = "8.12.1"; + + public void testScriptByteVectorSearch() throws Exception { + assumeTrue("byte vector search is not supported on this version", getOldClusterTestVersion().onOrAfter(BYTE_VECTOR_SEARCH_VERSION)); + if (isOldCluster()) { + // create index and index 10 random floating point vectors + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "element_type": "byte", + "index": false + } + } + } + """; + createIndex(SCRIPT_BYTE_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(SCRIPT_BYTE_INDEX_NAME); + // refresh the index + client().performRequest(new Request("POST", "/" + SCRIPT_BYTE_INDEX_NAME + "/_refresh")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + SCRIPT_BYTE_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + } + + public void testScriptVectorSearch() throws Exception { + assumeTrue( + "Float vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(FLOAT_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + // create index and index 10 random floating point vectors + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "index": false + } + } + } + """; + createIndex(SCRIPT_VECTOR_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(SCRIPT_VECTOR_INDEX_NAME); + // refresh the index + client().performRequest(new Request("POST", "/" + SCRIPT_VECTOR_INDEX_NAME + "/_refresh")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + SCRIPT_VECTOR_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": 
"cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + } + + public void testFloatVectorSearch() throws Exception { + assumeTrue( + "Float vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(FLOAT_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "similarity": "l2_norm", + "index_options": { + "type": "hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(FLOAT_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(FLOAT_INDEX_NAME); + // force merge the index + client().performRequest(new Request("POST", "/" + FLOAT_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + FLOAT_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + FLOAT_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 5, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("2")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.028571429, 0.0001)); + } + + public void testByteVectorSearch() throws Exception { + assumeTrue("Byte vector search is not supported on this version", getOldClusterTestVersion().onOrAfter(BYTE_VECTOR_SEARCH_VERSION)); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "element_type": "byte", + "index": true, + "similarity": "l2_norm", + "index_options": { + "type": "hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(BYTE_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(BYTE_INDEX_NAME); + // refresh the index + // force merge the index + client().performRequest(new Request("POST", "/" + BYTE_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + BYTE_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + 
Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + BYTE_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 5, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("2")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.028571429, 0.0001)); + } + + public void testQuantizedVectorSearch() throws Exception { + assumeTrue( + "Quantized vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(QUANTIZED_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "similarity": "cosine", + "index_options": { + "type": "int8_hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(QUANTIZED_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(QUANTIZED_INDEX_NAME); + // force merge the index + client().performRequest(new Request("POST", "/" + QUANTIZED_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + Request searchRequest = new Request("POST", "/" + QUANTIZED_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + QUANTIZED_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 5, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.0001)); + } + + private void indexVectors(String indexName) throws Exception { + String[] vectors = new String[] { + "{\"vector\":[1, 1, 1]}", + "{\"vector\":[1, 1, 2]}", + "{\"vector\":[1, 1, 3]}", + "{\"vector\":[1, 2, 1]}", + "{\"vector\":[1, 3, 1]}", + "{\"vector\":[2, 1, 1]}", + "{\"vector\":[3, 1, 1]}", + "{}" }; + for (int i = 0; i < vectors.length; i++) { + Request indexRequest = new Request("PUT", "/" + indexName + "/_doc/" + i); + indexRequest.setJsonEntity(vectors[i]); + assertOK(client().performRequest(indexRequest)); + } + } + + private static Map search(Request request) throws IOException { + final Response response = client().performRequest(request); + assertOK(response); + return responseAsMap(response); + } + + @SuppressWarnings("unchecked") + 
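+    /* Resolves a dotted path such as "hits.total.value" against the parsed response map using
+       XContentMapValues.extractValue, casting the result to the caller's expected type. */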
    private static <T> T extractValue(Map<String, Object> map, String path) {
+        return (T) XContentMapValues.extractValue(path, map);
+    }
+}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml
index dbe167608e576..fa3c740612872 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml
@@ -82,3 +82,100 @@
   - remove_index:
       index: test_index
       must_exist: true
+---
+"Partial success with must_exist == false":
+  - skip:
+      version: " - 8.13.99"
+      reason: "alias action results do not work until 8.14"
+  - do:
+      indices.create:
+        index: test_index
+  - do:
+      indices.update_aliases:
+        body:
+          actions:
+            - add:
+                index: test_index
+                aliases: test_alias1
+            - remove:
+                index: test_index
+                aliases: test_non_existing
+                must_exist: false
+  - is_true: errors
+  - match: { action_results.0.status: 200 }
+  - match: { action_results.0.action: { 'type': 'add', 'indices': ['test_index'], 'aliases': ['test_alias1'] } }
+  - match: { action_results.0.error: null }
+  - match: { action_results.1.status: 404 }
+  - match: { action_results.1.action: { 'type': 'remove', 'indices': ['test_index'], 'aliases': ['test_non_existing'] } }
+  - match: { action_results.1.error.type: aliases_not_found_exception }
+---
+"Partial success with must_exist == null (default)":
+  - skip:
+      version: " - 8.13.99"
+      reason: "alias action results do not work until 8.14"
+  - do:
+      indices.create:
+        index: test_index
+  - do:
+      indices.update_aliases:
+        body:
+          actions:
+            - add:
+                index: test_index
+                aliases: test_alias1
+            - remove:
+                index: test_index
+                aliases: test_non_existing
+  - is_true: errors
+  - match: { action_results.0.status: 200 }
+  - match: { action_results.0.action: { 'type': 'add', 'indices': ['test_index'], 'aliases': ['test_alias1'] } }
+  - match: { action_results.0.error: null }
+  - match: { action_results.1.status: 404 }
+  - match: { action_results.1.action: { 'type': 'remove', 'indices': ['test_index'], 'aliases': ['test_non_existing'] } }
+  - match: { action_results.1.error.type: aliases_not_found_exception }
+---
+"No action_results field if all actions successful":
+  - skip:
+      version: " - 8.13.99"
+      reason: "alias action results do not work until 8.14"
+  - do:
+      indices.create:
+        index: test_index
+  - do:
+      indices.update_aliases:
+        body:
+          actions:
+            - add:
+                index: test_index
+                aliases: test_alias1
+  - is_false: errors
+  - match: { action_results: null }
+---
+"Single result per input action":
+  - skip:
+      version: " - 8.13.99"
+      reason: "alias action results do not work until 8.14"
+  - do:
+      indices.create:
+        index: test_index1
+  - do:
+      indices.create:
+        index: test_index2
+  - do:
+      indices.update_aliases:
+        body:
+          actions:
+            - add:
+                index: test_index*
+                aliases: test_alias1
+            - remove:
+                index: test_index*
+                aliases: test_non_existing
+  - length: { action_results: 2 }
+  - is_true: errors
+  - match: { action_results.0.status: 200 }
+  - match: { action_results.0.action: { 'type': 'add', 'indices': ['test_index1', 'test_index2'], 'aliases': ['test_alias1'] } }
+  - match: { action_results.0.error: null }
+  - match: { action_results.1.status: 404 }
+  - match: { action_results.1.action: { 'type': 'remove', 'indices': ['test_index1', 'test_index2'], 'aliases': ['test_non_existing'] } }
+  - match: { action_results.1.error.type:
aliases_not_found_exception } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index 409fbdd70333e..e0dbc74567053 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -61,6 +61,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptySet; import static org.elasticsearch.cluster.metadata.AliasMetadata.newAliasMetadataBuilder; +import static org.elasticsearch.cluster.metadata.IndexMetadataTests.randomInferenceFields; import static org.elasticsearch.cluster.routing.RandomShardRoutingMutator.randomChange; import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.cluster.routing.UnassignedInfoTests.randomUnassignedInfo; @@ -571,7 +572,7 @@ public IndexMetadata randomCreate(String name) { @Override public IndexMetadata randomChange(IndexMetadata part) { IndexMetadata.Builder builder = IndexMetadata.builder(part); - switch (randomIntBetween(0, 2)) { + switch (randomIntBetween(0, 3)) { case 0: builder.settings(Settings.builder().put(part.getSettings()).put(randomSettings(Settings.EMPTY))); break; @@ -585,6 +586,9 @@ public IndexMetadata randomChange(IndexMetadata part) { case 2: builder.settings(Settings.builder().put(part.getSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)); break; + case 3: + builder.putInferenceFields(randomInferenceFields()); + break; default: throw new IllegalArgumentException("Shouldn't be here"); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java index 43506647f89ba..cd0bf5c428118 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java @@ -103,10 +103,6 @@ public void testElectOnlyBetweenMasterNodes() throws Exception { internalCluster().nonMasterClient().admin().cluster().prepareState().get().getState().nodes().getMasterNode().getName(), equalTo(masterNodeName) ); - assertThat( - internalCluster().nonMasterClient().admin().cluster().prepareState().get().getState().nodes().getMasterNode().getName(), - equalTo(masterNodeName) - ); assertThat( internalCluster().masterClient().admin().cluster().prepareState().get().getState().nodes().getMasterNode().getName(), equalTo(masterNodeName) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java index fd6151e8eadde..edf6973849bad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java @@ -86,7 +86,7 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return new TestDocumentSizeReporter(); } }; diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 
83b8606da2997..abfea0b18b9d8 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -6,6 +6,7 @@ * Side Public License, v 1. */ +import org.elasticsearch.index.codec.Elasticsearch814Codec; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; import org.elasticsearch.plugins.internal.RestExtension; @@ -243,6 +244,7 @@ exports org.elasticsearch.index.codec; exports org.elasticsearch.index.codec.tsdb; exports org.elasticsearch.index.codec.bloomfilter; + exports org.elasticsearch.index.codec.zstd; exports org.elasticsearch.index.engine; exports org.elasticsearch.index.fielddata; exports org.elasticsearch.index.fielddata.fieldcomparator; @@ -433,6 +435,7 @@ with org.elasticsearch.index.codec.vectors.ES813FlatVectorFormat, org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat; + provides org.apache.lucene.codecs.Codec with Elasticsearch814Codec; exports org.elasticsearch.cluster.routing.allocation.shards to diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5c22d087c1acf..5b88d64c40c05 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -164,6 +164,9 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0); public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0); public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0); + public static final TransportVersion ALIAS_ACTION_RESULTS = def(8_626_00_0); + public static final TransportVersion HISTOGRAM_AGGS_KEY_SORTED = def(8_627_00_0); + public static final TransportVersion INFERENCE_FIELDS_METADATA = def(8_628_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java index b52098a49c002..1f87cf618dfcf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.action.admin.indices.alias; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse.AliasActionResult; import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest; import org.elasticsearch.cluster.metadata.AliasAction; @@ -18,8 +19,11 @@ public class IndicesAliasesClusterStateUpdateRequest extends ClusterStateUpdateRequest { private final List actions; - public IndicesAliasesClusterStateUpdateRequest(List actions) { + private final List actionResults; + + public IndicesAliasesClusterStateUpdateRequest(List actions, List actionResults) { this.actions = actions; + this.actionResults = actionResults; } /** @@ -28,4 +32,8 @@ public IndicesAliasesClusterStateUpdateRequest(List actions) { public List actions() { return actions; } + + public List getActionResults() { + return actionResults; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index a4f5ee9eb672b..fac2006b68814 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -83,7 +83,6 @@ public static class AliasActions implements AliasesRequest, Writeable, ToXConten private static final ParseField IS_WRITE_INDEX = new ParseField("is_write_index"); private static final ParseField IS_HIDDEN = new ParseField("is_hidden"); private static final ParseField MUST_EXIST = new ParseField("must_exist"); - private static final ParseField ADD = new ParseField("add"); private static final ParseField REMOVE = new ParseField("remove"); private static final ParseField REMOVE_INDEX = new ParseField("remove_index"); @@ -105,6 +104,10 @@ public byte value() { return value; } + public String getFieldName() { + return fieldName; + } + public static Type fromValue(byte value) { return switch (value) { case 0 -> ADD; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java index 4e49a5fe8d400..1462e36ea7895 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.index.query.QueryBuilder; @@ -21,7 +20,7 @@ */ public class IndicesAliasesRequestBuilder extends AcknowledgedRequestBuilder< IndicesAliasesRequest, - AcknowledgedResponse, + 
IndicesAliasesResponse, IndicesAliasesRequestBuilder> { public IndicesAliasesRequestBuilder(ElasticsearchClient client) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java new file mode 100644 index 0000000000000..b4f483e6f8161 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java @@ -0,0 +1,245 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.indices.alias; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * Response with error information for a request to add/remove aliases for one or more indices. + * Contains an acknowledged boolean, an errors boolean, and a list of results. + * The result list is only present if there are errors, and contains a result for every input action. + * This response replaces AcknowledgedResponse, and knows how to de/serialize from/to AcknowledgedResponse + * in case of mixed version clusters. 
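+ *
+ * For illustration only (shape inferred from the REST tests added alongside this class, not part of the
+ * original javadoc): a request with one successful add and one failed remove renders roughly as
+ * <pre>
+ * {
+ *   "acknowledged": true,
+ *   "errors": true,
+ *   "action_results": [
+ *     { "status": 200, "action": { "type": "add", "indices": ["test_index"], "aliases": ["test_alias1"] } },
+ *     { "status": 404, "action": { "type": "remove", "indices": ["test_index"], "aliases": ["test_non_existing"] },
+ *       "error": { "type": "aliases_not_found_exception" } }
+ *   ]
+ * }
+ * </pre>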
+ */
+public class IndicesAliasesResponse extends AcknowledgedResponse {
+
+    // Response without any error information, analogous to AcknowledgedResponse.FALSE
+    public static final IndicesAliasesResponse NOT_ACKNOWLEDGED = new IndicesAliasesResponse(false, false, List.of());
+
+    // Response without any error information, analogous to AcknowledgedResponse.TRUE
+    public static final IndicesAliasesResponse ACKNOWLEDGED_NO_ERRORS = new IndicesAliasesResponse(true, false, List.of());
+
+    private static final String ACTION_RESULTS_FIELD = "action_results";
+    private static final String ERRORS_FIELD = "errors";
+
+    private final List<AliasActionResult> actionResults;
+    private final boolean errors;
+
+    protected IndicesAliasesResponse(StreamInput in) throws IOException {
+        super(in);
+
+        if (in.getTransportVersion().onOrAfter(TransportVersions.ALIAS_ACTION_RESULTS)) {
+            this.errors = in.readBoolean();
+            this.actionResults = in.readCollectionAsImmutableList(AliasActionResult::new);
+        } else {
+            this.errors = false;
+            this.actionResults = List.of();
+        }
+    }
+
+    /**
+     * @param acknowledged whether the update was acknowledged by all the relevant nodes in the cluster
+     * @param errors true if any of the requested actions failed
+     * @param actionResults the list of results for each input action, only present if there are errors
+     */
+    IndicesAliasesResponse(boolean acknowledged, boolean errors, final List<AliasActionResult> actionResults) {
+        super(acknowledged);
+        this.errors = errors;
+        this.actionResults = actionResults;
+    }
+
+    public List<AliasActionResult> getActionResults() {
+        return actionResults;
+    }
+
+    public boolean hasErrors() {
+        return errors;
+    }
+
+    /**
+     * Build a response from a list of action results. Sets the errors boolean based
+     * on whether any of the individual results contains an error.
+     * @param actionResults an action result for each of the requested alias actions
+     * @return response containing all action results
+     */
+    public static IndicesAliasesResponse build(final List<AliasActionResult> actionResults) {
+        assert actionResults.isEmpty() == false : "IndicesAliasesResponse must be instantiated with at least one action result.";
+        final boolean errors = actionResults.stream().anyMatch(a -> a.error != null);
+        return new IndicesAliasesResponse(true, errors, actionResults);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        if (out.getTransportVersion().onOrAfter(TransportVersions.ALIAS_ACTION_RESULTS)) {
+            out.writeBoolean(errors);
+            out.writeCollection(actionResults);
+        }
+    }
+
+    @Override
+    protected void addCustomFields(XContentBuilder builder, Params params) throws IOException {
+        builder.field(ERRORS_FIELD, errors);
+        // if there are no errors, don't provide granular list of results
+        if (errors) {
+            builder.field(ACTION_RESULTS_FIELD, actionResults);
+        }
+    }
+
+    @Override
+    // equals is only used in tests
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        if (super.equals(o) == false) return false;
+        IndicesAliasesResponse response = (IndicesAliasesResponse) o;
+        return errors == response.errors && Objects.equals(actionResults, response.actionResults);
+    }
+
+    @Override
+    // hashCode is only used in tests
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), actionResults, errors);
+    }
+
+    /**
+     * Result for a single alias add/remove action
+     */
+    public static class AliasActionResult implements Writeable, ToXContentObject {
+
+        /**
+         * Resolved indices to which the action applies. This duplicates information
+         * which exists in the action, but is included because the action indices may
+         * or may not be resolved depending on whether the security layer is used.
+         */
+        private final List<String> indices;
+        private final AliasActions action;
+        private final ElasticsearchException error;
+
+        /**
+         * Build a result that can be either a success or a failure.
+         * @param indices the resolved indices to which the associated action applies
+         * @param action the alias action consisting of add/remove, aliases, and indices
+         * @param numAliasesRemoved the number of aliases removed, if any
+         * @return the action result
+         */
+        public static AliasActionResult build(List<String> indices, AliasActions action, int numAliasesRemoved) {
+            if (action.actionType() == AliasActions.Type.REMOVE && numAliasesRemoved == 0) {
+                return buildRemoveError(indices, action);
+            }
+            return buildSuccess(indices, action);
+        }
+
+        /**
+         * Build an error result for a failed remove action.
+         */
+        private static AliasActionResult buildRemoveError(List<String> indices, AliasActions action) {
+            return new AliasActionResult(indices, action, new AliasesNotFoundException(action.getOriginalAliases()));
+        }
+
+        /**
+         * Build a success action result with no errors.
+         */
+        public static AliasActionResult buildSuccess(List<String> indices, AliasActions action) {
+            return new AliasActionResult(indices, action, null);
+        }
+
+        private int getStatus() {
+            return error == null ? 200 : error.status().getStatus();
+        }
+
+        private AliasActionResult(List<String> indices, AliasActions action, ElasticsearchException error) {
+            assert indices.isEmpty() == false : "Alias action result must be instantiated with at least one index";
+            this.indices = indices;
+            this.action = action;
+            this.error = error;
+        }
+
+        private AliasActionResult(StreamInput in) throws IOException {
+            this.indices = in.readStringCollectionAsList();
+            this.action = new AliasActions(in);
+            this.error = in.readException();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeStringCollection(indices);
+            action.writeTo(out);
+            out.writeException(error);
+        }
+
+        public static final String ACTION_FIELD = "action";
+        public static final String ACTION_TYPE_FIELD = "type";
+        public static final String ACTION_INDICES_FIELD = "indices";
+        public static final String ACTION_ALIASES_FIELD = "aliases";
+        public static final String STATUS_FIELD = "status";
+        public static final String ERROR_FIELD = "error";
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+
+            // include subset of fields from action request
+            builder.field(ACTION_FIELD);
+            builder.startObject();
+            builder.field(ACTION_TYPE_FIELD, action.actionType().getFieldName());
+            builder.field(ACTION_INDICES_FIELD, indices.stream().sorted().collect(Collectors.toList()));
+            builder.array(ACTION_ALIASES_FIELD, action.getOriginalAliases());
+            builder.endObject();
+
+            builder.field(STATUS_FIELD, getStatus());
+
+            if (error != null) {
+                builder.startObject(ERROR_FIELD);
+                error.toXContent(builder, params);
+                builder.endObject();
+            }
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        // equals is only used in tests
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            AliasActionResult that = (AliasActionResult) o;
+            return Objects.equals(indices, that.indices) && Objects.equals(action, that.action)
+                // ElasticsearchException does not have hashCode() so assume
errors are equal iff class and message are equal + && Objects.equals(error == null ? null : error.getMessage(), that.error == null ? null : that.error.getMessage()) + && Objects.equals(error == null ? null : error.getClass(), that.error == null ? null : that.error.getClass()); + } + + @Override + // Only used hashCode in tests + public int hashCode() { + return Objects.hash( + indices, + action, + // ElasticsearchException does not have hashCode() so assume errors are equal iff class and message are equal + error == null ? null : error.getMessage(), + error == null ? null : error.getClass() + ); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index e56be8852e7df..2e231b398af72 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -14,9 +14,9 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RequestValidators; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse.AliasActionResult; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -56,10 +56,10 @@ /** * Add/remove aliases action */ -public class TransportIndicesAliasesAction extends AcknowledgedTransportMasterNodeAction { +public class TransportIndicesAliasesAction extends TransportMasterNodeAction { public static final String NAME = "indices:admin/aliases"; - public static final ActionType TYPE = new ActionType<>(NAME); + public static final ActionType TYPE = new ActionType<>(NAME); private static final Logger logger = LogManager.getLogger(TransportIndicesAliasesAction.class); private final MetadataIndexAliasesService indexAliasesService; @@ -85,6 +85,7 @@ public TransportIndicesAliasesAction( actionFilters, IndicesAliasesRequest::new, indexNameExpressionResolver, + IndicesAliasesResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.indexAliasesService = indexAliasesService; @@ -106,15 +107,19 @@ protected void masterOperation( Task task, final IndicesAliasesRequest request, final ClusterState state, - final ActionListener listener + final ActionListener listener ) { // Expand the indices names List actions = request.aliasActions(); List finalActions = new ArrayList<>(); + List actionResults = new ArrayList<>(); // Resolve all the AliasActions into AliasAction instances and gather all the aliases Set aliases = new HashSet<>(); for (AliasActions action : actions) { + int numAliasesRemoved = 0; + List resolvedIndices = new ArrayList<>(); + List concreteDataStreams = indexNameExpressionResolver.dataStreamNames( state, request.indicesOptions(), @@ -161,18 +166,24 @@ protected void masterOperation( finalActions.add(new AddDataStreamAlias(alias, dataStreamName, action.writeIndex(), action.filter())); } } + + 
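+                /* One AliasActionResult is recorded per input action; for a data stream add, the resolved
+                   indices reported back are the concrete data stream names. */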
actionResults.add(AliasActionResult.buildSuccess(concreteDataStreams, action)); continue; } case REMOVE -> { for (String dataStreamName : concreteDataStreams) { for (String alias : concreteDataStreamAliases(action, state.metadata(), dataStreamName)) { finalActions.add(new AliasAction.RemoveDataStreamAlias(alias, dataStreamName, action.mustExist())); + numAliasesRemoved++; } } + if (nonBackingIndices.isEmpty() == false) { // Regular aliases/indices match as well with the provided expression. // (Only when adding new aliases, matching both data streams and indices is disallowed) + resolvedIndices.addAll(concreteDataStreams); } else { + actionResults.add(AliasActionResult.build(concreteDataStreams, action, numAliasesRemoved)); continue; } } @@ -224,6 +235,7 @@ protected void masterOperation( case REMOVE: for (String alias : concreteAliases(action, state.metadata(), index.getName())) { finalActions.add(new AliasAction.Remove(index.getName(), alias, action.mustExist())); + numAliasesRemoved++; } break; case REMOVE_INDEX: @@ -233,14 +245,18 @@ protected void masterOperation( throw new IllegalArgumentException("Unsupported action [" + action.actionType() + "]"); } } + + Arrays.stream(concreteIndices).map(Index::getName).forEach(resolvedIndices::add); + actionResults.add(AliasActionResult.build(resolvedIndices, action, numAliasesRemoved)); } if (finalActions.isEmpty() && false == actions.isEmpty()) { throw new AliasesNotFoundException(aliases.toArray(new String[aliases.size()])); } request.aliasActions().clear(); - IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest(unmodifiableList(finalActions)) - .ackTimeout(request.timeout()) - .masterNodeTimeout(request.masterNodeTimeout()); + IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest( + unmodifiableList(finalActions), + unmodifiableList(actionResults) + ).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()); indexAliasesService.indicesAliases(updateRequest, listener.delegateResponse((l, e) -> { logger.debug("failed to perform aliases", e); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 1e9b1446850af..412e4f3c875e8 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -49,6 +49,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -74,16 +75,16 @@ final class BulkOperation extends ActionRunnable { private final long startTimeNanos; private final ClusterStateObserver observer; private final Map indicesThatCannotBeCreated; - private final String executorName; + private final Executor executor; private final LongSupplier relativeTimeProvider; private final FailureStoreDocumentConverter failureStoreDocumentConverter; - private IndexNameExpressionResolver indexNameExpressionResolver; - private NodeClient client; + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final NodeClient client; BulkOperation( Task task, ThreadPool threadPool, - String executorName, + Executor executor, ClusterService clusterService, BulkRequest bulkRequest, NodeClient client, @@ -97,7 +98,7 @@ final class 
BulkOperation extends ActionRunnable { this( task, threadPool, - executorName, + executor, clusterService, bulkRequest, client, @@ -115,7 +116,7 @@ final class BulkOperation extends ActionRunnable { BulkOperation( Task task, ThreadPool threadPool, - String executorName, + Executor executor, ClusterService clusterService, BulkRequest bulkRequest, NodeClient client, @@ -137,7 +138,7 @@ final class BulkOperation extends ActionRunnable { this.listener = listener; this.startTimeNanos = startTimeNanos; this.indicesThatCannotBeCreated = indicesThatCannotBeCreated; - this.executorName = executorName; + this.executor = executor; this.relativeTimeProvider = relativeTimeProvider; this.indexNameExpressionResolver = indexNameExpressionResolver; this.client = client; @@ -543,7 +544,7 @@ public void onTimeout(TimeValue timeout) { } private void dispatchRetry() { - threadPool.executor(executorName).submit(operation); + executor.execute(operation); } }); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index bf50fd06d056b..3494701cf5b7a 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -70,6 +70,7 @@ import java.util.Optional; import java.util.Set; import java.util.SortedMap; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -101,6 +102,9 @@ public class TransportBulkAction extends HandledTransportAction releasingListener = ActionListener.runBefore(listener, releasable::close); - final String executorName = isOnlySystem ? Names.SYSTEM_WRITE : Names.WRITE; - ensureClusterStateThenForkAndExecute(task, bulkRequest, executorName, releasingListener); + final Executor executor = isOnlySystem ? 
systemWriteExecutor : writeExecutor; + ensureClusterStateThenForkAndExecute(task, bulkRequest, executor, releasingListener); } private void ensureClusterStateThenForkAndExecute( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener releasingListener ) { final ClusterState initialState = clusterService.state(); @@ -274,7 +280,7 @@ private void ensureClusterStateThenForkAndExecute( clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { - forkAndExecute(task, bulkRequest, executorName, releasingListener); + forkAndExecute(task, bulkRequest, executor, releasingListener); } @Override @@ -288,20 +294,20 @@ public void onTimeout(TimeValue timeout) { } }, newState -> false == newState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.WRITE)); } else { - forkAndExecute(task, bulkRequest, executorName, releasingListener); + forkAndExecute(task, bulkRequest, executor, releasingListener); } } - private void forkAndExecute(Task task, BulkRequest bulkRequest, String executorName, ActionListener releasingListener) { - threadPool.executor(executorName).execute(new ActionRunnable<>(releasingListener) { + private void forkAndExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener releasingListener) { + executor.execute(new ActionRunnable<>(releasingListener) { @Override protected void doRun() { - doInternalExecute(task, bulkRequest, executorName, releasingListener); + doInternalExecute(task, bulkRequest, executor, releasingListener); } }); } - protected void doInternalExecute(Task task, BulkRequest bulkRequest, String executorName, ActionListener listener) { + protected void doInternalExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener listener) { final long startTime = relativeTime(); boolean hasIndexRequestsWithPipelines = false; @@ -334,7 +340,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec assert arePipelinesResolved : bulkRequest; } if (clusterService.localNode().isIngestNode()) { - processBulkIndexIngestRequest(task, bulkRequest, executorName, metadata, l); + processBulkIndexIngestRequest(task, bulkRequest, executor, metadata, l); } else { ingestForwarder.forwardIngestRequest(bulkAction, bulkRequest, l); } @@ -385,7 +391,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec createMissingIndicesAndIndexData( task, bulkRequest, - executorName, + executor, listener, indicesToAutoCreate, dataStreamsToBeRolledOver, @@ -401,7 +407,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec protected void createMissingIndicesAndIndexData( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToBeRolledOver, @@ -411,13 +417,13 @@ protected void createMissingIndicesAndIndexData( final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); // Optimizing when there are no prerequisite actions if (indicesToAutoCreate.isEmpty() && dataStreamsToBeRolledOver.isEmpty()) { - executeBulk(task, bulkRequest, startTime, listener, executorName, responses, indicesThatCannotBeCreated); + executeBulk(task, bulkRequest, startTime, listener, executor, responses, indicesThatCannotBeCreated); return; } - Runnable executeBulkRunnable = () -> threadPool.executor(executorName).execute(new ActionRunnable<>(listener) { + Runnable executeBulkRunnable = () -> 
executor.execute(new ActionRunnable<>(listener) { @Override protected void doRun() { - executeBulk(task, bulkRequest, startTime, listener, executorName, responses, indicesThatCannotBeCreated); + executeBulk(task, bulkRequest, startTime, listener, executor, responses, indicesThatCannotBeCreated); } }); try (RefCountingRunnable refs = new RefCountingRunnable(executeBulkRunnable)) { @@ -636,14 +642,14 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { new BulkOperation( task, threadPool, - executorName, + executor, clusterService, bulkRequest, client, @@ -663,7 +669,7 @@ private long relativeTime() { private void processBulkIndexIngestRequest( Task task, BulkRequest original, - String executorName, + Executor executor, Metadata metadata, ActionListener listener ) { @@ -696,7 +702,7 @@ private void processBulkIndexIngestRequest( ActionRunnable runnable = new ActionRunnable<>(actionListener) { @Override protected void doRun() { - doInternalExecute(task, bulkRequest, executorName, actionListener); + doInternalExecute(task, bulkRequest, executor, actionListener); } @Override @@ -713,12 +719,12 @@ public boolean isForceExecution() { if (originalThread == Thread.currentThread()) { runnable.run(); } else { - threadPool.executor(executorName).execute(runnable); + executor.execute(runnable); } } } }, - executorName + executor ); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 265719b4738c0..39de11d39bc34 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -487,7 +487,7 @@ private static void onComplete( final BulkItemResponse executionResult = context.getExecutionResult(); final boolean isFailed = executionResult.isFailed(); if (isFailed == false && opType != DocWriteRequest.OpType.DELETE) { - DocumentSizeReporter documentSizeReporter = documentParsingProvider.getDocumentParsingReporter(); + DocumentSizeReporter documentSizeReporter = documentParsingProvider.getDocumentParsingReporter(docWriteRequest.index()); DocumentSizeObserver documentSizeObserver = context.getDocumentSizeObserver(); documentSizeReporter.onCompleted(docWriteRequest.index(), documentSizeObserver.normalisedBytesParsed()); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index f65d0f462fde6..1b3949f3c00ac 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -30,6 +30,7 @@ import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; public class TransportSimulateBulkAction extends TransportBulkAction { @Inject @@ -70,7 +71,7 @@ public TransportSimulateBulkAction( protected void createMissingIndicesAndIndexData( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToRollover, diff --git a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java index 
d931302740f19..d38f5b0439f84 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; @@ -247,7 +248,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { /** * Opens one or more indices based on their index name. * - * @param indices The name of the indices to close + * @param indices The name of the indices to open */ OpenIndexRequestBuilder prepareOpen(String... indices); @@ -371,7 +372,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The index aliases request * @return The result future */ - ActionFuture aliases(IndicesAliasesRequest request); + ActionFuture aliases(IndicesAliasesRequest request); /** * Allows to add/remove aliases from indices. @@ -379,7 +380,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The index aliases request * @param listener A listener to be notified with a result */ - void aliases(IndicesAliasesRequest request, ActionListener listener); + void aliases(IndicesAliasesRequest request, ActionListener listener); /** * Allows to add/remove aliases from indices. diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java index 76073696b0b27..26a8768a78e78 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java @@ -118,6 +118,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -1083,12 +1084,12 @@ public ThreadPool threadPool() { } @Override - public ActionFuture aliases(final IndicesAliasesRequest request) { + public ActionFuture aliases(final IndicesAliasesRequest request) { return execute(TransportIndicesAliasesAction.TYPE, request); } @Override - public void aliases(final IndicesAliasesRequest request, final ActionListener listener) { + public void aliases(final IndicesAliasesRequest request, final ActionListener listener) { execute(TransportIndicesAliasesAction.TYPE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index fc7eaa97c677b..156ba88a7d2b1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java 
+++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfigExclusion; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration; import org.elasticsearch.cluster.coordination.CoordinationState.VoteCollection; +import org.elasticsearch.cluster.coordination.ElectionStrategy.NodeEligibility; import org.elasticsearch.cluster.coordination.FollowersChecker.FollowerCheckRequest; import org.elasticsearch.cluster.coordination.JoinHelper.InitialJoinAccumulator; import org.elasticsearch.cluster.metadata.Metadata; @@ -544,8 +545,14 @@ private void startElection() { // The preVoteCollector is only active while we are candidate, but it does not call this method with synchronisation, so we have // to check our mode again here. if (mode == Mode.CANDIDATE) { - if (localNodeMayWinElection(getLastAcceptedState(), electionStrategy) == false) { - logger.trace("skip election as local node may not win it: {}", getLastAcceptedState().coordinationMetadata()); + final var nodeEligibility = localNodeMayWinElection(getLastAcceptedState(), electionStrategy); + if (nodeEligibility.mayWin() == false) { + assert nodeEligibility.reason().isEmpty() == false; + logger.trace( + "skip election as local node may not win it ({}): {}", + nodeEligibility.reason(), + getLastAcceptedState().coordinationMetadata() + ); return; } @@ -598,7 +605,7 @@ private void abdicateTo(DiscoveryNode newMaster) { becomeCandidate("after abdicating to " + newMaster); } - private static boolean localNodeMayWinElection(ClusterState lastAcceptedState, ElectionStrategy electionStrategy) { + private static NodeEligibility localNodeMayWinElection(ClusterState lastAcceptedState, ElectionStrategy electionStrategy) { final DiscoveryNode localNode = lastAcceptedState.nodes().getLocalNode(); assert localNode != null; return electionStrategy.nodeMayWinElection(lastAcceptedState, localNode); @@ -1283,8 +1290,12 @@ public boolean setInitialConfiguration(final VotingConfiguration votingConfigura metadataBuilder.coordinationMetadata(coordinationMetadata); coordinationState.get().setInitialState(ClusterState.builder(currentState).metadata(metadataBuilder).build()); - assert localNodeMayWinElection(getLastAcceptedState(), electionStrategy) - : "initial state does not allow local node to win election: " + getLastAcceptedState().coordinationMetadata(); + var nodeEligibility = localNodeMayWinElection(getLastAcceptedState(), electionStrategy); + assert nodeEligibility.mayWin() + : "initial state does not allow local node to win election, reason: " + + nodeEligibility.reason() + + " , metadata: " + + getLastAcceptedState().coordinationMetadata(); preVoteCollector.update(getPreVoteResponse(), null); // pick up the change to last-accepted version startElectionScheduler(); return true; @@ -1767,9 +1778,14 @@ public void run() { synchronized (mutex) { if (mode == Mode.CANDIDATE) { final ClusterState lastAcceptedState = coordinationState.get().getLastAcceptedState(); - - if (localNodeMayWinElection(lastAcceptedState, electionStrategy) == false) { - logger.trace("skip prevoting as local node may not win election: {}", lastAcceptedState.coordinationMetadata()); + final var nodeEligibility = localNodeMayWinElection(lastAcceptedState, electionStrategy); + if (nodeEligibility.mayWin() == false) { + assert nodeEligibility.reason().isEmpty() == false; + logger.trace( + "skip prevoting as local node may not win election ({}): {}", + 
nodeEligibility.reason(), + lastAcceptedState.coordinationMetadata() + ); return; } @@ -1983,10 +1999,10 @@ public void onResponse(Void ignored) { // if necessary, abdicate to another node or improve the voting configuration boolean attemptReconfiguration = true; final ClusterState state = getLastAcceptedState(); // committed state - if (localNodeMayWinElection(state, electionStrategy) == false) { + if (localNodeMayWinElection(state, electionStrategy).mayWin() == false) { final List masterCandidates = completedNodes().stream() .filter(DiscoveryNode::isMasterNode) - .filter(node -> electionStrategy.nodeMayWinElection(state, node)) + .filter(node -> electionStrategy.nodeMayWinElection(state, node).mayWin()) .filter(node -> { // check if master candidate would be able to get an election quorum if we were to // abdicate to it. Assume that every node that completed the publication can provide diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java index c98d9ec39e0f0..2bf6e10a9855a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java @@ -34,6 +34,17 @@ protected boolean satisfiesAdditionalQuorumConstraints( } }; + /** + * Contains a result for whether a node may win an election and the reason if not. + */ + public record NodeEligibility(boolean mayWin, String reason) {} + + public static final NodeEligibility NODE_MAY_WIN_ELECTION = new NodeEligibility(true, ""); + public static final NodeEligibility NODE_MAY_NOT_WIN_ELECTION = new NodeEligibility( + false, + "node is ineligible for election, not a voting node in the voting configuration" + ); + /** * Whether there is an election quorum from the point of view of the given local node under the provided voting configurations */ @@ -105,10 +116,13 @@ public void beforeCommit(long term, long version, ActionListener listener) listener.onResponse(null); } - public boolean nodeMayWinElection(ClusterState lastAcceptedState, DiscoveryNode node) { + public NodeEligibility nodeMayWinElection(ClusterState lastAcceptedState, DiscoveryNode node) { final String nodeId = node.getId(); - return lastAcceptedState.getLastCommittedConfiguration().getNodeIds().contains(nodeId) + if (lastAcceptedState.getLastCommittedConfiguration().getNodeIds().contains(nodeId) || lastAcceptedState.getLastAcceptedConfiguration().getNodeIds().contains(nodeId) - || lastAcceptedState.getVotingConfigExclusions().stream().noneMatch(vce -> vce.getNodeId().equals(nodeId)); + || lastAcceptedState.getVotingConfigExclusions().stream().noneMatch(vce -> vce.getNodeId().equals(nodeId))) { + return NODE_MAY_WIN_ELECTION; + } + return NODE_MAY_NOT_WIN_ELECTION; } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java index 63647e53619fe..533ae3a3ad50d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java @@ -8,10 +8,10 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; +import 
org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; /** * Individual operation to perform on the cluster state as part of an {@link IndicesAliasesRequest}. @@ -189,7 +189,7 @@ boolean removeIndex() { boolean apply(NewAliasValidator aliasValidator, Metadata.Builder metadata, IndexMetadata index) { if (false == index.getAliases().containsKey(alias)) { if (mustExist != null && mustExist) { - throw new ResourceNotFoundException("required alias [" + alias + "] does not exist"); + throw new AliasesNotFoundException(alias); } return false; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 364a1b31ceeba..d4fd57427793b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -527,7 +527,7 @@ public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation) lifecycle, failureStore, failureIndices, - false, + rolloverOnWrite, autoShardingEvent ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 22672756bdaf0..529814e83ba38 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -540,6 +540,8 @@ public Iterator> settings() { public static final String KEY_SHARD_SIZE_FORECAST = "shard_size_forecast"; + public static final String KEY_INFERENCE_FIELDS = "field_inference"; + public static final String INDEX_STATE_FILE_PREFIX = "state-"; static final TransportVersion SYSTEM_INDEX_FLAG_ADDED = TransportVersions.V_7_10_0; @@ -574,6 +576,8 @@ public Iterator> settings() { @Nullable private final MappingMetadata mapping; + private final ImmutableOpenMap inferenceFields; + private final ImmutableOpenMap customData; private final Map> inSyncAllocationIds; @@ -642,6 +646,7 @@ private IndexMetadata( final int numberOfReplicas, final Settings settings, final MappingMetadata mapping, + final ImmutableOpenMap inferenceFields, final ImmutableOpenMap aliases, final ImmutableOpenMap customData, final Map> inSyncAllocationIds, @@ -692,6 +697,7 @@ private IndexMetadata( this.totalNumberOfShards = numberOfShards * (numberOfReplicas + 1); this.settings = settings; this.mapping = mapping; + this.inferenceFields = inferenceFields; this.customData = customData; this.aliases = aliases; this.inSyncAllocationIds = inSyncAllocationIds; @@ -748,6 +754,7 @@ IndexMetadata withMappingMetadata(MappingMetadata mapping) { this.numberOfReplicas, this.settings, mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -806,6 +813,7 @@ public IndexMetadata withInSyncAllocationIds(int shardId, Set inSyncSet) this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, Maps.copyMapWithAddedOrReplacedEntry(this.inSyncAllocationIds, shardId, Set.copyOf(inSyncSet)), @@ -862,6 +870,7 @@ public IndexMetadata withIncrementedPrimaryTerm(int shardId) { this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -918,6 +927,7 @@ public IndexMetadata withTimestampRange(IndexLongFieldRange timestampRange) { this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, 
this.customData, this.inSyncAllocationIds, @@ -970,6 +980,7 @@ public IndexMetadata withIncrementedVersion() { this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -1193,6 +1204,10 @@ public MappingMetadata mapping() { return mapping; } + public Map getInferenceFields() { + return inferenceFields; + } + @Nullable public IndexMetadataStats getStats() { return stats; @@ -1403,6 +1418,9 @@ public boolean equals(Object o) { if (rolloverInfos.equals(that.rolloverInfos) == false) { return false; } + if (inferenceFields.equals(that.inferenceFields) == false) { + return false; + } if (isSystem != that.isSystem) { return false; } @@ -1423,6 +1441,7 @@ public int hashCode() { result = 31 * result + Arrays.hashCode(primaryTerms); result = 31 * result + inSyncAllocationIds.hashCode(); result = 31 * result + rolloverInfos.hashCode(); + result = 31 * result + inferenceFields.hashCode(); result = 31 * result + Boolean.hashCode(isSystem); return result; } @@ -1469,6 +1488,7 @@ private static class IndexMetadataDiff implements Diff { @Nullable private final Diff settingsDiff; private final Diff> mappings; + private final Diff> inferenceFields; private final Diff> aliases; private final Diff> customData; private final Diff>> inSyncAllocationIds; @@ -1500,6 +1520,7 @@ private static class IndexMetadataDiff implements Diff { : ImmutableOpenMap.builder(1).fPut(MapperService.SINGLE_MAPPING_NAME, after.mapping).build(), DiffableUtils.getStringKeySerializer() ); + inferenceFields = DiffableUtils.diff(before.inferenceFields, after.inferenceFields, DiffableUtils.getStringKeySerializer()); aliases = DiffableUtils.diff(before.aliases, after.aliases, DiffableUtils.getStringKeySerializer()); customData = DiffableUtils.diff(before.customData, after.customData, DiffableUtils.getStringKeySerializer()); inSyncAllocationIds = DiffableUtils.diff( @@ -1524,6 +1545,8 @@ private static class IndexMetadataDiff implements Diff { new DiffableUtils.DiffableValueReader<>(DiffableStringMap::readFrom, DiffableStringMap::readDiffFrom); private static final DiffableUtils.DiffableValueReader ROLLOVER_INFO_DIFF_VALUE_READER = new DiffableUtils.DiffableValueReader<>(RolloverInfo::new, RolloverInfo::readDiffFrom); + private static final DiffableUtils.DiffableValueReader INFERENCE_FIELDS_METADATA_DIFF_VALUE_READER = + new DiffableUtils.DiffableValueReader<>(InferenceFieldMetadata::new, InferenceFieldMetadata::readDiffFrom); IndexMetadataDiff(StreamInput in) throws IOException { index = in.readString(); @@ -1546,6 +1569,15 @@ private static class IndexMetadataDiff implements Diff { } primaryTerms = in.readVLongArray(); mappings = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), MAPPING_DIFF_VALUE_READER); + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) { + inferenceFields = DiffableUtils.readImmutableOpenMapDiff( + in, + DiffableUtils.getStringKeySerializer(), + INFERENCE_FIELDS_METADATA_DIFF_VALUE_READER + ); + } else { + inferenceFields = DiffableUtils.emptyDiff(); + } aliases = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), ALIAS_METADATA_DIFF_VALUE_READER); customData = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), CUSTOM_DIFF_VALUE_READER); inSyncAllocationIds = DiffableUtils.readJdkMapDiff( @@ -1595,6 +1627,9 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeVLongArray(primaryTerms); 
mappings.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) { + inferenceFields.writeTo(out); + } aliases.writeTo(out); customData.writeTo(out); inSyncAllocationIds.writeTo(out); @@ -1628,6 +1663,7 @@ public IndexMetadata apply(IndexMetadata part) { builder.mapping = mappings.apply( ImmutableOpenMap.builder(1).fPut(MapperService.SINGLE_MAPPING_NAME, part.mapping).build() ).get(MapperService.SINGLE_MAPPING_NAME); + builder.inferenceFields.putAllFromMap(inferenceFields.apply(part.inferenceFields)); builder.aliases.putAllFromMap(aliases.apply(part.aliases)); builder.customMetadata.putAllFromMap(customData.apply(part.customData)); builder.inSyncAllocationIds.putAll(inSyncAllocationIds.apply(part.inSyncAllocationIds)); @@ -1673,6 +1709,10 @@ public static IndexMetadata readFrom(StreamInput in, @Nullable Function builder.putInferenceField(f)); + } int aliasesSize = in.readVInt(); for (int i = 0; i < aliasesSize; i++) { AliasMetadata aliasMd = new AliasMetadata(in); @@ -1733,6 +1773,9 @@ public void writeTo(StreamOutput out, boolean mappingsAsHash) throws IOException mapping.writeTo(out); } } + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) { + out.writeCollection(inferenceFields.values()); + } out.writeCollection(aliases.values()); out.writeMap(customData, StreamOutput::writeWriteable); out.writeMap( @@ -1788,6 +1831,7 @@ public static class Builder { private long[] primaryTerms = null; private Settings settings = Settings.EMPTY; private MappingMetadata mapping; + private final ImmutableOpenMap.Builder inferenceFields; private final ImmutableOpenMap.Builder aliases; private final ImmutableOpenMap.Builder customMetadata; private final Map> inSyncAllocationIds; @@ -1802,6 +1846,7 @@ public static class Builder { public Builder(String index) { this.index = index; + this.inferenceFields = ImmutableOpenMap.builder(); this.aliases = ImmutableOpenMap.builder(); this.customMetadata = ImmutableOpenMap.builder(); this.inSyncAllocationIds = new HashMap<>(); @@ -1819,6 +1864,7 @@ public Builder(IndexMetadata indexMetadata) { this.settings = indexMetadata.getSettings(); this.primaryTerms = indexMetadata.primaryTerms.clone(); this.mapping = indexMetadata.mapping; + this.inferenceFields = ImmutableOpenMap.builder(indexMetadata.inferenceFields); this.aliases = ImmutableOpenMap.builder(indexMetadata.aliases); this.customMetadata = ImmutableOpenMap.builder(indexMetadata.customData); this.routingNumShards = indexMetadata.routingNumShards; @@ -2059,6 +2105,16 @@ public Builder shardSizeInBytesForecast(Long shardSizeInBytesForecast) { return this; } + public Builder putInferenceField(InferenceFieldMetadata value) { + this.inferenceFields.put(value.getName(), value); + return this; + } + + public Builder putInferenceFields(Map values) { + this.inferenceFields.putAllFromMap(values); + return this; + } + public IndexMetadata build() { return build(false); } @@ -2221,6 +2277,7 @@ IndexMetadata build(boolean repair) { numberOfReplicas, settings, mapping, + inferenceFields.build(), aliasesMap, newCustomMetadata, Map.ofEntries(denseInSyncAllocationIds), @@ -2379,6 +2436,14 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build builder.field(KEY_SHARD_SIZE_FORECAST, indexMetadata.shardSizeInBytesForecast); } + if (indexMetadata.getInferenceFields().isEmpty() == false) { + builder.startObject(KEY_INFERENCE_FIELDS); + for (InferenceFieldMetadata field : indexMetadata.getInferenceFields().values()) { + 
field.toXContent(builder, params); + } + builder.endObject(); + } + builder.endObject(); } @@ -2456,6 +2521,11 @@ public static IndexMetadata fromXContent(XContentParser parser, Map, ToXContentFragment { + private static final String INFERENCE_ID_FIELD = "inference_id"; + private static final String SOURCE_FIELDS_FIELD = "source_fields"; + + private final String name; + private final String inferenceId; + private final String[] sourceFields; + + public InferenceFieldMetadata(String name, String inferenceId, String[] sourceFields) { + this.name = Objects.requireNonNull(name); + this.inferenceId = Objects.requireNonNull(inferenceId); + this.sourceFields = Objects.requireNonNull(sourceFields); + } + + public InferenceFieldMetadata(StreamInput input) throws IOException { + this.name = input.readString(); + this.inferenceId = input.readString(); + this.sourceFields = input.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeString(inferenceId); + out.writeStringArray(sourceFields); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InferenceFieldMetadata that = (InferenceFieldMetadata) o; + return Objects.equals(name, that.name) + && Objects.equals(inferenceId, that.inferenceId) + && Arrays.equals(sourceFields, that.sourceFields); + } + + @Override + public int hashCode() { + int result = Objects.hash(name, inferenceId); + result = 31 * result + Arrays.hashCode(sourceFields); + return result; + } + + public String getName() { + return name; + } + + public String getInferenceId() { + return inferenceId; + } + + public String[] getSourceFields() { + return sourceFields; + } + + public static Diff readDiffFrom(StreamInput in) throws IOException { + return SimpleDiffable.readDiffFrom(InferenceFieldMetadata::new, in); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(name); + builder.field(INFERENCE_ID_FIELD, inferenceId); + builder.array(SOURCE_FIELDS_FIELD, sourceFields); + return builder.endObject(); + } + + public static InferenceFieldMetadata fromXContent(XContentParser parser) throws IOException { + final String name = parser.currentName(); + + XContentParser.Token token = parser.nextToken(); + Objects.requireNonNull(token, "Expected InferenceFieldMetadata but got EOF"); + + String currentFieldName = null; + String inferenceId = null; + List inputFields = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.VALUE_STRING) { + if (INFERENCE_ID_FIELD.equals(currentFieldName)) { + inferenceId = parser.text(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (SOURCE_FIELDS_FIELD.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.VALUE_STRING) { + inputFields.add(parser.text()); + } else { + parser.skipChildren(); + } + } + } + } else { + parser.skipChildren(); + } + } + return new InferenceFieldMetadata(name, inferenceId, inputFields.toArray(String[]::new)); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 
da24f0b9d0dc5..52642e1de8ac9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -1263,10 +1263,11 @@ static IndexMetadata buildIndexMetadata( indexMetadataBuilder.system(isSystem); // now, update the mappings with the actual source Map mappingsMetadata = new HashMap<>(); - DocumentMapper mapper = documentMapperSupplier.get(); - if (mapper != null) { - MappingMetadata mappingMd = new MappingMetadata(mapper); - mappingsMetadata.put(mapper.type(), mappingMd); + DocumentMapper docMapper = documentMapperSupplier.get(); + if (docMapper != null) { + MappingMetadata mappingMd = new MappingMetadata(docMapper); + mappingsMetadata.put(docMapper.type(), mappingMd); + indexMetadataBuilder.putInferenceFields(docMapper.mappers().inferenceFields()); } for (MappingMetadata mappingMd : mappingsMetadata.values()) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java index fb5acbdd2ac49..d9cd1a7725ca8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java @@ -11,7 +11,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateAckListener; import org.elasticsearch.cluster.ClusterStateTaskExecutor; @@ -79,7 +79,10 @@ public Tuple executeTask(ApplyAliasesTask this.taskQueue = clusterService.createTaskQueue("index-aliases", Priority.URGENT, this.executor); } - public void indicesAliases(final IndicesAliasesClusterStateUpdateRequest request, final ActionListener listener) { + public void indicesAliases( + final IndicesAliasesClusterStateUpdateRequest request, + final ActionListener listener + ) { taskQueue.submitTask("index-aliases", new ApplyAliasesTask(request, listener), null); // TODO use request.masterNodeTimeout() here? } @@ -254,7 +257,7 @@ private static void validateAliasTargetIsNotDSBackingIndex(ClusterState currentS /** * A cluster state update task that consists of the cluster state request and the listeners that need to be notified upon completion. 
*/ - record ApplyAliasesTask(IndicesAliasesClusterStateUpdateRequest request, ActionListener listener) + record ApplyAliasesTask(IndicesAliasesClusterStateUpdateRequest request, ActionListener listener) implements ClusterStateTaskListener, ClusterStateAckListener { @@ -271,17 +274,17 @@ public boolean mustAck(DiscoveryNode discoveryNode) { @Override public void onAllNodesAcked() { - listener.onResponse(AcknowledgedResponse.TRUE); + listener.onResponse(IndicesAliasesResponse.build(request.getActionResults())); } @Override public void onAckFailure(Exception e) { - listener.onResponse(AcknowledgedResponse.FALSE); + listener.onResponse(IndicesAliasesResponse.NOT_ACKNOWLEDGED); } @Override public void onAckTimeout() { - listener.onResponse(AcknowledgedResponse.FALSE); + listener.onResponse(IndicesAliasesResponse.NOT_ACKNOWLEDGED); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 3ca206eaddb28..4e714b96f64c7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -193,9 +193,10 @@ private static ClusterState applyRequest( IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(indexMetadata); // Mapping updates on a single type may have side-effects on other types so we need to // update mapping metadata on all types - DocumentMapper mapper = mapperService.documentMapper(); - if (mapper != null) { - indexMetadataBuilder.putMapping(new MappingMetadata(mapper)); + DocumentMapper docMapper = mapperService.documentMapper(); + if (docMapper != null) { + indexMetadataBuilder.putMapping(new MappingMetadata(docMapper)); + indexMetadataBuilder.putInferenceFields(docMapper.mappers().inferenceFields()); } if (updatedMapping) { indexMetadataBuilder.mappingVersion(1 + indexMetadataBuilder.mappingVersion()); diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java b/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java index bd97ec0c2f63f..056fc59b4fdd5 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java @@ -58,7 +58,7 @@ protected void writeTo(StreamOutput out, Writer nestedExceptionsWrite } @Override - protected String getExceptionName() { + public String getExceptionName() { return name; } diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 5fcb4684d3f8d..14c1d1e9ef6aa 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -33,6 +33,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +/** + * A collection of static methods to help create different ES Executor types. 
+ */ public class EsExecutors { // although the available processors may technically change, for node sizing we use the number available at launch diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index d4771ba74e0fb..3ebcd1cb5b420 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -11,7 +11,9 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.mapper.MapperService; import java.util.HashMap; @@ -25,22 +27,40 @@ */ public class CodecService { + public static final FeatureFlag ZSTD_STORED_FIELDS_FEATURE_FLAG = new FeatureFlag("zstd_stored_fields"); + private final Map codecs; public static final String DEFAULT_CODEC = "default"; + public static final String LEGACY_DEFAULT_CODEC = "legacy_default"; // escape hatch public static final String BEST_COMPRESSION_CODEC = "best_compression"; + public static final String LEGACY_BEST_COMPRESSION_CODEC = "legacy_best_compression"; // escape hatch + /** the raw unfiltered lucene default. useful for testing */ public static final String LUCENE_DEFAULT_CODEC = "lucene_default"; public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene99Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene99Codec(Lucene99Codec.Mode.BEST_COMPRESSION)); + + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays); + if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { + codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { - codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays)); - codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays)); + codecs.put(DEFAULT_CODEC, legacyBestSpeedCodec); } + codecs.put(LEGACY_DEFAULT_CODEC, legacyBestSpeedCodec); + + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { + codecs.put( + BEST_COMPRESSION_CODEC, + new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) + ); + } else { + codecs.put(BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); + } + codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); + codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); for (String codec : Codec.availableCodecs()) { codecs.put(codec, Codec.forName(codec)); diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java new file mode 100644 index 0000000000000..e85e05c87b083 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 8.14. This extends the Lucene 9.9 codec to compress stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. + */ +public class Elasticsearch814Codec extends FilterCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch814Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch814Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch814Codec.this.getKnnVectorsFormatForField(field); + } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch814Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch814", new Lucene99Codec()); + this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); + this.defaultPostingsFormat = new Lucene99PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *
<p>The default implementation always returns "Lucene99". + + *
<p>WARNING: if you subclass, you are responsible for index backwards compatibility: + * future versions of Lucene are only guaranteed to be able to read the default implementation. + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field. + + *
<p>The default implementation always returns "Lucene90". + + *
<p>WARNING: if you subclass, you are responsible for index backwards compatibility: + * future versions of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field. + + *
<p>The default implementation always returns "Lucene99". + + *
<p>WARNING: if you subclass, you are responsible for index backwards compatibility: + * future versions of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java new file mode 100644 index 0000000000000..a682d26b094e6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.mapper.MapperService; + +/** + * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new + * changes in {@link PerFieldMapperCodec}. + */ +public final class LegacyPerFieldMapperCodec extends Lucene99Codec { + + private final PerFieldFormatSupplier formatSupplier; + + public LegacyPerFieldMapperCodec(Lucene99Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + super(compressionMode); + this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); + // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch + // codec that delegates to this new Lucene codec, and make PerFieldMapperCodec extend this new Elasticsearch codec. + assert Codec.forName(Lucene.LATEST_CODEC).getClass() == getClass().getSuperclass() + : "LegacyPerFieldMapperCodec must be on the latest lucene codec: " + Lucene.LATEST_CODEC; + } + + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return formatSupplier.getPostingsFormatForField(field); + } + + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return formatSupplier.getKnnVectorsFormatForField(field); + } + + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return formatSupplier.getDocValuesFormatForField(field); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java new file mode 100644 index 0000000000000..81fc2c0b4a065 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; +import org.elasticsearch.index.codec.postings.ES812PostingsFormat; +import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; +import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; + +import java.util.Objects; + +/** + * Class that encapsulates the logic of figuring out the most appropriate file format for a given field, across postings, doc values and + * vectors. + */ +public class PerFieldFormatSupplier { + + private final MapperService mapperService; + private final BigArrays bigArrays; + private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); + private final KnnVectorsFormat knnVectorsFormat = new Lucene99HnswVectorsFormat(); + private final ES87BloomFilterPostingsFormat bloomFilterPostingsFormat; + private final ES87TSDBDocValuesFormat tsdbDocValuesFormat; + + private final ES812PostingsFormat es812PostingsFormat; + + public PerFieldFormatSupplier(MapperService mapperService, BigArrays bigArrays) { + this.mapperService = mapperService; + this.bigArrays = Objects.requireNonNull(bigArrays); + this.bloomFilterPostingsFormat = new ES87BloomFilterPostingsFormat(bigArrays, this::internalGetPostingsFormatForField); + this.tsdbDocValuesFormat = new ES87TSDBDocValuesFormat(); + this.es812PostingsFormat = new ES812PostingsFormat(); + } + + public PostingsFormat getPostingsFormatForField(String field) { + if (useBloomFilter(field)) { + return bloomFilterPostingsFormat; + } + return internalGetPostingsFormatForField(field); + } + + private PostingsFormat internalGetPostingsFormatForField(String field) { + if (mapperService != null) { + final PostingsFormat format = mapperService.mappingLookup().getPostingsFormat(field); + if (format != null) { + return format; + } + } + // return our own posting format using PFOR + return es812PostingsFormat; + } + + boolean useBloomFilter(String field) { + if (mapperService == null) { + return false; + } + IndexSettings indexSettings = mapperService.getIndexSettings(); + if (mapperService.mappingLookup().isDataStreamTimestampFieldEnabled()) { + // In the case of time series indices, the _id isn't randomly generated, + // but based on dimension fields and timestamp field, so during indexing + // version/seq_no/term needs to be looked up and having a bloom filter + // can speed this up significantly.
+ return indexSettings.getMode() == IndexMode.TIME_SERIES + && IdFieldMapper.NAME.equals(field) + && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); + } else { + return IdFieldMapper.NAME.equals(field) && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); + } + } + + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + if (mapperService != null) { + Mapper mapper = mapperService.mappingLookup().getMapper(field); + if (mapper instanceof DenseVectorFieldMapper vectorMapper) { + return vectorMapper.getKnnVectorsFormatForField(knnVectorsFormat); + } + } + return knnVectorsFormat; + } + + public DocValuesFormat getDocValuesFormatForField(String field) { + if (useTSDBDocValuesFormat(field)) { + return tsdbDocValuesFormat; + } + return docValuesFormat; + } + + boolean useTSDBDocValuesFormat(final String field) { + if (excludeFields(field)) { + return false; + } + + return mapperService != null && isTimeSeriesModeIndex() && mapperService.getIndexSettings().isES87TSDBCodecEnabled(); + } + + private boolean excludeFields(String fieldName) { + // Avoid using tsdb codec for fields like _seq_no, _primary_term. + // But _tsid and _ts_routing_hash should always use the tsdb codec. + return fieldName.startsWith("_") && fieldName.equals("_tsid") == false && fieldName.equals("_ts_routing_hash") == false; + } + + private boolean isTimeSeriesModeIndex() { + return mapperService != null && IndexMode.TIME_SERIES == mapperService.getIndexSettings().getMode(); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index ae497af887d9c..6f88578260db3 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -12,19 +12,10 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.IndexMode; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; -import org.elasticsearch.index.codec.postings.ES812PostingsFormat; -import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; -import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; /** * {@link PerFieldMapperCodec This Lucene codec} provides the default @@ -34,93 +25,32 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. 
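For illustration only (this sketch is not part of the change): the per-field dispatch described above can be exercised directly through the new PerFieldFormatSupplier. The MapperService argument is assumed to come from a test fixture, and BigArrays.NON_RECYCLING_INSTANCE serves as a stand-in allocator; the field names are hypothetical.

    import org.apache.lucene.codecs.DocValuesFormat;
    import org.apache.lucene.codecs.KnnVectorsFormat;
    import org.apache.lucene.codecs.PostingsFormat;
    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.index.codec.PerFieldFormatSupplier;
    import org.elasticsearch.index.mapper.MapperService;

    class PerFieldDispatchSketch {
        // Resolves the format for a few fields; the supplier falls back to its defaults
        // (ES812PostingsFormat, Lucene90DocValuesFormat, Lucene99HnswVectorsFormat)
        // when the mapping configures nothing field-specific.
        static void show(MapperService mapperService) {
            PerFieldFormatSupplier supplier = new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE);
            PostingsFormat id = supplier.getPostingsFormatForField("_id"); // bloom-filter wrapped when enabled
            KnnVectorsFormat knn = supplier.getKnnVectorsFormatForField("my_vector"); // dense_vector mappers may override
            DocValuesFormat dv = supplier.getDocValuesFormatForField("@timestamp"); // tsdb doc values in time-series mode
        }
    }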
*/ -public final class PerFieldMapperCodec extends Lucene99Codec { +public final class PerFieldMapperCodec extends Elasticsearch814Codec { - private final MapperService mapperService; - private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); - private final ES87BloomFilterPostingsFormat bloomFilterPostingsFormat; - private final ES87TSDBDocValuesFormat tsdbDocValuesFormat; + private final PerFieldFormatSupplier formatSupplier; - private final ES812PostingsFormat es812PostingsFormat; - - static { - assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMapperCodec.class) - : "PerFieldMapperCodec must subclass the latest lucene codec: " + Lucene.LATEST_CODEC; - } - - public PerFieldMapperCodec(Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); - this.mapperService = mapperService; - this.bloomFilterPostingsFormat = new ES87BloomFilterPostingsFormat(bigArrays, this::internalGetPostingsFormatForField); - this.tsdbDocValuesFormat = new ES87TSDBDocValuesFormat(); - this.es812PostingsFormat = new ES812PostingsFormat(); + this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); + // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch + // codec that delegates to this new Lucene codec, and make PerFieldMapperCodec extend this new Elasticsearch codec. + assert Codec.forName(Lucene.LATEST_CODEC).getClass() == delegate.getClass() + : "PerFieldMapperCodec must be on the latest lucene codec: " + Lucene.LATEST_CODEC; } @Override public PostingsFormat getPostingsFormatForField(String field) { - if (useBloomFilter(field)) { - return bloomFilterPostingsFormat; - } - return internalGetPostingsFormatForField(field); - } - - private PostingsFormat internalGetPostingsFormatForField(String field) { - final PostingsFormat format = mapperService.mappingLookup().getPostingsFormat(field); - if (format != null) { - return format; - } - // return our own posting format using PFOR - return es812PostingsFormat; - } - - boolean useBloomFilter(String field) { - IndexSettings indexSettings = mapperService.getIndexSettings(); - if (mapperService.mappingLookup().isDataStreamTimestampFieldEnabled()) { - // In case for time series indices, they _id isn't randomly generated, - // but based on dimension fields and timestamp field, so during indexing - // version/seq_no/term needs to be looked up and having a bloom filter - // can speed this up significantly. 
- return indexSettings.getMode() == IndexMode.TIME_SERIES - && IdFieldMapper.NAME.equals(field) - && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); - } else { - return IdFieldMapper.NAME.equals(field) && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); - } + return formatSupplier.getPostingsFormatForField(field); } @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { - Mapper mapper = mapperService.mappingLookup().getMapper(field); - if (mapper instanceof DenseVectorFieldMapper vectorMapper) { - return vectorMapper.getKnnVectorsFormatForField(super.getKnnVectorsFormatForField(field)); - } - return super.getKnnVectorsFormatForField(field); + return formatSupplier.getKnnVectorsFormatForField(field); } @Override public DocValuesFormat getDocValuesFormatForField(String field) { - if (useTSDBDocValuesFormat(field)) { - return tsdbDocValuesFormat; - } - return docValuesFormat; - } - - boolean useTSDBDocValuesFormat(final String field) { - if (excludeFields(field)) { - return false; - } - - return mapperService != null && isTimeSeriesModeIndex() && mapperService.getIndexSettings().isES87TSDBCodecEnabled(); - } - - private boolean excludeFields(String fieldName) { - // Avoid using tsdb codec for fields like _seq_no, _primary_term. - // But _tsid and _ts_routing_hash should always use the tsdb codec. - return fieldName.startsWith("_") && fieldName.equals("_tsid") == false && fieldName.equals("_ts_routing_hash") == false; - } - - private boolean isTimeSeriesModeIndex() { - return IndexMode.TIME_SERIES == mapperService.getIndexSettings().getMode(); + return formatSupplier.getDocValuesFormatForField(field); } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java new file mode 100644 index 0000000000000..b827bb6436f07 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java @@ -0,0 +1,212 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.StoredFieldsWriter; +import org.apache.lucene.codecs.compressing.CompressionMode; +import org.apache.lucene.codecs.compressing.Compressor; +import org.apache.lucene.codecs.compressing.Decompressor; +import org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingStoredFieldsFormat; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.store.ByteBuffersDataInput; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; +import org.elasticsearch.nativeaccess.NativeAccess; +import org.elasticsearch.nativeaccess.Zstd; + +import java.io.IOException; + +/** + * {@link org.apache.lucene.codecs.StoredFieldsFormat} that compresses blocks of data using ZStandard. 
+ * + * Unlike Lucene's default stored fields format, this format does not make use of dictionaries (even though ZStandard has great support for + * dictionaries!). This is mostly due to the fact that LZ4/DEFLATE have short sliding windows that they can use to find duplicate strings + * (64kB and 32kB respectively). In contrast, ZSTD doesn't have such a limitation and can better take advantage of large compression + * buffers. + */ +public final class Zstd814StoredFieldsFormat extends Lucene90CompressingStoredFieldsFormat { + + // ZSTD has special optimizations for inputs that are less than 16kB and less than 256kB. So subtract a bit of memory from 16kB and + // 256kB to make our inputs unlikely to grow beyond 16kB for BEST_SPEED and 256kB for BEST_COMPRESSION. + private static final int BEST_SPEED_BLOCK_SIZE = (16 - 2) * 1_024; + private static final int BEST_COMPRESSION_BLOCK_SIZE = (256 - 16) * 1_024; + + /** Attribute key for compression mode. */ + public static final String MODE_KEY = Zstd814StoredFieldsFormat.class.getSimpleName() + ".mode"; + + public enum Mode { + BEST_SPEED(0, BEST_SPEED_BLOCK_SIZE, 128), + BEST_COMPRESSION(3, BEST_COMPRESSION_BLOCK_SIZE, 2048); + + final int level, blockSizeInBytes, blockDocCount; + + Mode(int level, int blockSizeInBytes, int blockDocCount) { + this.level = level; + this.blockSizeInBytes = blockSizeInBytes; + this.blockDocCount = blockDocCount; + } + } + + private final Mode mode; + + public Zstd814StoredFieldsFormat(Mode mode) { + super("ZstdStoredFields814", new ZstdCompressionMode(mode.level), mode.blockSizeInBytes, mode.blockDocCount, 10); + this.mode = mode; + } + + @Override + public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException { + // Both modes are compatible; we only put an attribute for debug purposes. + String previous = si.putAttribute(MODE_KEY, mode.name()); + if (previous != null && previous.equals(mode.name()) == false) { + throw new IllegalStateException( + "found existing value for " + MODE_KEY + " for segment: " + si.name + ", old=" + previous + ", new=" + mode.name() + ); + } + return super.fieldsWriter(directory, si, context); + } + + private static class ZstdCompressionMode extends CompressionMode { + private final int level; + + ZstdCompressionMode(int level) { + this.level = level; + } + + @Override + public Compressor newCompressor() { + return new ZstdCompressor(level); + } + + @Override + public Decompressor newDecompressor() { + return new ZstdDecompressor(); + } + + @Override + public String toString() { + return "ZSTD(level=" + level + ")"; + } + } + + private static final class ZstdDecompressor extends Decompressor { + + // Buffer for copying between the DataInput and native memory. No hard science behind this number, it just tries to be high enough + // to benefit from bulk copying and low enough to keep heap usage under control.
+ final byte[] copyBuffer = new byte[4096]; + + ZstdDecompressor() {} + + @Override + public void decompress(DataInput in, int originalLength, int offset, int length, BytesRef bytes) throws IOException { + if (originalLength == 0) { + bytes.offset = 0; + bytes.length = 0; + return; + } + + final NativeAccess nativeAccess = NativeAccess.instance(); + final Zstd zstd = nativeAccess.getZstd(); + + final int compressedLength = in.readVInt(); + + try ( + CloseableByteBuffer src = nativeAccess.newBuffer(compressedLength); + CloseableByteBuffer dest = nativeAccess.newBuffer(originalLength) + ) { + + while (src.buffer().position() < compressedLength) { + final int numBytes = Math.min(copyBuffer.length, compressedLength - src.buffer().position()); + in.readBytes(copyBuffer, 0, numBytes); + src.buffer().put(copyBuffer, 0, numBytes); + } + src.buffer().flip(); + + final int decompressedLen = zstd.decompress(dest, src); + if (decompressedLen != originalLength) { + throw new CorruptIndexException("Expected " + originalLength + " decompressed bytes, got " + decompressedLen, in); + } + + bytes.bytes = ArrayUtil.growNoCopy(bytes.bytes, length); + dest.buffer().get(offset, bytes.bytes, 0, length); + bytes.offset = 0; + bytes.length = length; + } + } + + @Override + public Decompressor clone() { + return new ZstdDecompressor(); + } + } + + private static class ZstdCompressor extends Compressor { + + final int level; + // Buffer for copying between the DataInput and native memory. No hard science behind this number, it just tries to be high enough + // to benefit from bulk copying and low enough to keep heap usage under control. + final byte[] copyBuffer = new byte[4096]; + + ZstdCompressor(int level) { + this.level = level; + } + + @Override + public void compress(ByteBuffersDataInput buffersInput, DataOutput out) throws IOException { + final NativeAccess nativeAccess = NativeAccess.instance(); + final Zstd zstd = nativeAccess.getZstd(); + + final int srcLen = Math.toIntExact(buffersInput.length()); + if (srcLen == 0) { + return; + } + + final int compressBound = zstd.compressBound(srcLen); + + // NOTE: We are allocating/deallocating native buffers on each call. We could save allocations by reusing these buffers, though + // this would come at the expense of higher permanent memory usage. Benchmarks suggested that there is some performance to save + // there, but it wouldn't be a game changer either. + // Also note that calls to #compress implicitly allocate memory under the hood for e.g. hash tables and chain tables that help + // identify duplicate strings. So if we wanted to avoid allocating memory on every compress call, we should also look into + // reusing compression contexts, which are not small and would increase permanent memory usage as well. 
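As a sketch of the buffer-reuse alternative that the note above weighs and rejects, a minimal pool could look like the following; DirectBufferPool is hypothetical and uses plain java.nio direct buffers rather than the CloseableByteBuffer native wrappers used here.

import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Deque;

// Hypothetical: reuse direct buffers across compress calls instead of
// allocating per call, trading permanent memory for fewer allocations.
final class DirectBufferPool {
    private final Deque<ByteBuffer> free = new ArrayDeque<>();

    synchronized ByteBuffer acquire(int capacity) {
        ByteBuffer buffer = free.poll();
        if (buffer == null || buffer.capacity() < capacity) {
            buffer = ByteBuffer.allocateDirect(capacity); // grow on demand
        }
        buffer.clear();
        return buffer;
    }

    synchronized void release(ByteBuffer buffer) {
        free.push(buffer); // never shrinks: this is the permanent-memory cost
    }
}

Per the note, the change keeps per-call allocation because the measured win was modest, and a pool (plus reused compression contexts) would raise permanent memory usage.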
+ try ( + CloseableByteBuffer src = nativeAccess.newBuffer(srcLen); + CloseableByteBuffer dest = nativeAccess.newBuffer(compressBound) + ) { + + while (buffersInput.position() < buffersInput.length()) { + final int numBytes = Math.min(copyBuffer.length, (int) (buffersInput.length() - buffersInput.position())); + buffersInput.readBytes(copyBuffer, 0, numBytes); + src.buffer().put(copyBuffer, 0, numBytes); + } + src.buffer().flip(); + + final int compressedLen = zstd.compress(dest, src, level); + out.writeVInt(compressedLen); + + for (int written = 0; written < compressedLen;) { + final int numBytes = Math.min(copyBuffer.length, compressedLen - written); + dest.buffer().get(copyBuffer, 0, numBytes); + out.writeBytes(copyBuffer, 0, numBytes); + written += numBytes; + assert written == dest.buffer().position(); + } + } + } + + @Override + public void close() throws IOException {} + } +} diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java index f3dcda813a39d..bc83f85edcf7d 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java @@ -90,6 +90,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class BooleanScriptLeafFieldData extends LeafLongFieldData { private final BooleanScriptDocValues booleanScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java index 1199583f89766..a9fdf72e23a31 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java @@ -90,6 +90,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class DateScriptLeafFieldData extends LeafLongFieldData { private final LongScriptDocValues longScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java index 9307233f99161..e08a62eee8fb0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java @@ -89,6 +89,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class DoubleScriptLeafFieldData extends LeafDoubleFieldData { private final DoubleScriptDocValues doubleScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java index dce94649e0088..391e9e285807f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java @@ -121,6 +121,7 @@ public final 
SortField sortField( case LONG: case DOUBLE: // longs, doubles and dates use the same type for doc-values and points. + sortField.setOptimizeSortWithPoints(isIndexed()); break; default: @@ -132,12 +133,18 @@ public final SortField sortField( } /** - * Does {@link #sortField} require a custom comparator because of the way - * the data is stored in doc values ({@code true}) or are the docs values - * stored such that they can be sorted without decoding ({@code false}). + * Should sorting use a custom comparator source vs. rely on a Lucene {@link SortField}. Using a Lucene {@link SortField} when possible + * is important because index sorting cannot be configured with a custom comparator, and because it gives better performance by + * dynamically pruning irrelevant hits. On the other hand, Lucene {@link SortField}s are less flexible and make stronger assumptions + * about how the data is indexed. Therefore, they cannot be used in all cases. */ protected abstract boolean sortRequiresCustomComparator(); + /** + * Return true if, and only if the field is indexed with points that match the content of doc values. + */ + protected abstract boolean isIndexed(); + @Override public final SortField sortField(Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { return sortField(getNumericType(), missingValue, sortMode, nested, reverse); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java index 6be5eb9514918..85850b530a1de 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java @@ -92,6 +92,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class LongScriptLeafFieldData extends LeafLongFieldData { private final LongScriptDocValues longScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java index a1686344b9309..b7654dfa5569f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java @@ -42,22 +42,25 @@ public static class Builder implements IndexFieldData.Builder { private final NumericType numericType; private final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + private final boolean indexed; public Builder( String name, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.name = name; this.numericType = numericType; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override public SortedDoublesIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new SortedDoublesIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory); + return new SortedDoublesIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory, indexed); } } @@ -65,18 +68,21 @@ public 
SortedDoublesIndexFieldData build(IndexFieldDataCache cache, CircuitBreak protected final String fieldName; protected final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; public SortedDoublesIndexFieldData( String fieldName, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.fieldName = fieldName; this.numericType = Objects.requireNonNull(numericType); assert this.numericType.isFloatingPoint(); this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override @@ -94,6 +100,11 @@ protected boolean sortRequiresCustomComparator() { return numericType == NumericType.HALF_FLOAT; } + @Override + public boolean isIndexed() { + return indexed; + } + @Override public NumericType getNumericType() { return numericType; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java index c2507dd2470a5..9c871ac822625 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java @@ -42,26 +42,34 @@ public static class Builder implements IndexFieldData.Builder { private final NumericType numericType; private final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + private final boolean indexed; - public Builder(String name, NumericType numericType, ToScriptFieldFactory toScriptFieldFactory) { - this(name, numericType, numericType.getValuesSourceType(), toScriptFieldFactory); + public Builder( + String name, + NumericType numericType, + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed + ) { + this(name, numericType, numericType.getValuesSourceType(), toScriptFieldFactory, indexed); } public Builder( String name, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.name = name; this.numericType = numericType; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override public SortedNumericIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new SortedNumericIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory); + return new SortedNumericIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory, indexed); } } @@ -69,18 +77,21 @@ public SortedNumericIndexFieldData build(IndexFieldDataCache cache, CircuitBreak protected final String fieldName; protected final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; public SortedNumericIndexFieldData( String fieldName, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.fieldName = fieldName; this.numericType = Objects.requireNonNull(numericType); assert this.numericType.isFloatingPoint() == false; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + 
this.indexed = indexed; } @Override @@ -98,6 +109,11 @@ protected boolean sortRequiresCustomComparator() { return false; } + @Override + public boolean isIndexed() { + return indexed; + } + @Override protected XFieldComparatorSource dateComparatorSource(Object missingValue, MultiValueMode sortMode, Nested nested) { if (numericType == NumericType.DATE_NANOSECONDS) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 968c48abc54d8..f07cd1cc32076 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -276,7 +276,9 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext } if ((operation == FielddataOperation.SEARCH || operation == FielddataOperation.SCRIPT) && hasDocValues()) { - return new SortedNumericIndexFieldData.Builder(name(), NumericType.BOOLEAN, BooleanDocValuesField::new); + // boolean fields are indexed, but not with points + boolean indexed = false; + return new SortedNumericIndexFieldData.Builder(name(), NumericType.BOOLEAN, BooleanDocValuesField::new, indexed); } if (operation == FielddataOperation.SCRIPT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 1b926734c1713..3092ed1e827df 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -795,7 +795,8 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext return new SortedNumericIndexFieldData.Builder( name(), resolution.numericType(), - resolution.getDefaultToScriptFieldFactory() + resolution.getDefaultToScriptFieldFactory(), + isIndexed() ); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java new file mode 100644 index 0000000000000..2b0833c72021b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.inference.InferenceService; + +import java.util.Set; + +/** + * Field mapper whose input must be transformed through the {@link InferenceService} before indexing. + */ +public interface InferenceFieldMapper { + + /** + * Retrieves the inference metadata associated with this mapper.
+ * + * @param sourcePaths The source paths that populate the input for the field (before inference) + */ + InferenceFieldMetadata getMetadata(Set<String> sourcePaths); +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index 673593cc6e240..bf879f30e5a29 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -10,9 +10,11 @@ import org.apache.lucene.codecs.PostingsFormat; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.inference.InferenceService; import java.util.ArrayList; import java.util.Collection; @@ -47,6 +49,7 @@ private CacheKey() {} /** Full field name to mapper */ private final Map fieldMappers; private final Map objectMappers; + private final Map inferenceFields; private final int runtimeFieldMappersCount; private final NestedLookup nestedLookup; private final FieldTypeLookup fieldTypeLookup; @@ -84,12 +87,12 @@ private static void collect( Collection fieldMappers, Collection fieldAliasMappers ) { - if (mapper instanceof ObjectMapper) { - objectMappers.add((ObjectMapper) mapper); - } else if (mapper instanceof FieldMapper) { - fieldMappers.add((FieldMapper) mapper); - } else if (mapper instanceof FieldAliasMapper) { - fieldAliasMappers.add((FieldAliasMapper) mapper); + if (mapper instanceof ObjectMapper objectMapper) { + objectMappers.add(objectMapper); + } else if (mapper instanceof FieldMapper fieldMapper) { + fieldMappers.add(fieldMapper); + } else if (mapper instanceof FieldAliasMapper fieldAliasMapper) { + fieldAliasMappers.add(fieldAliasMapper); } else { throw new IllegalStateException("Unrecognized mapper type [" + mapper.getClass().getSimpleName() + "]."); } @@ -174,6 +177,15 @@ private MappingLookup( final Collection runtimeFields = mapping.getRoot().runtimeFields(); this.fieldTypeLookup = new FieldTypeLookup(mappers, aliasMappers, runtimeFields); + + Map inferenceFields = new HashMap<>(); + for (FieldMapper mapper : mappers) { + if (mapper instanceof InferenceFieldMapper inferenceFieldMapper) { + inferenceFields.put(mapper.name(), inferenceFieldMapper.getMetadata(fieldTypeLookup.sourcePaths(mapper.name()))); + } + } + this.inferenceFields = Map.copyOf(inferenceFields); + if (runtimeFields.isEmpty()) { // without runtime fields this is the same as the field type lookup this.indexTimeLookup = fieldTypeLookup; @@ -360,6 +372,13 @@ public Map objectMappers() { return objectMappers; } + /** + * Returns a map containing all fields that require running inference through the {@link InferenceService} prior to indexing.
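A self-contained sketch of the collection step added to the MappingLookup constructor above; Mapper, FieldMetadata, and InferenceAware below are stand-ins for the real org.elasticsearch.index.mapper types, and the singleton source-path set replaces fieldTypeLookup.sourcePaths(name) only to keep the sketch runnable.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Stand-ins for the real mapper types.
interface Mapper { String name(); }
record FieldMetadata(String fieldName, Set<String> sourcePaths) {}
interface InferenceAware { FieldMetadata getMetadata(Set<String> sourcePaths); }

final class InferenceFieldCollection {
    // Mirrors the loop in the MappingLookup constructor: only mappers that
    // implement the marker interface contribute an entry, keyed by field name.
    static Map<String, FieldMetadata> collect(List<Mapper> mappers) {
        Map<String, FieldMetadata> inferenceFields = new HashMap<>();
        for (Mapper mapper : mappers) {
            if (mapper instanceof InferenceAware inferenceAware) {
                // real code passes fieldTypeLookup.sourcePaths(mapper.name())
                inferenceFields.put(mapper.name(), inferenceAware.getMetadata(Set.of(mapper.name())));
            }
        }
        return Map.copyOf(inferenceFields); // immutable, like the real lookup
    }
}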
+ */ + public Map inferenceFields() { + return inferenceFields; + } + public NestedLookup nestedLookup() { return nestedLookup; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 1f7a3bf2106ae..ebb6672cbab18 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -403,8 +403,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, HalfFloatDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + HalfFloatDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -577,8 +583,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, FloatDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + FloatDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -717,8 +729,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, DoubleDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + DoubleDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -831,8 +849,14 @@ Number valueForSearch(Number value) { } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, ByteDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + ByteDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -930,8 +954,14 @@ Number valueForSearch(Number value) { } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, ShortDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + ShortDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1097,8 +1127,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - 
public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, IntegerDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + IntegerDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1234,8 +1270,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, LongDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + LongDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1494,7 +1536,7 @@ public static Query longRangeQuery( return builder.apply(l, u); } - public abstract IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType); + public abstract IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType); public IndexFieldData.Builder getValueFetcherFieldDataBuilder( String name, @@ -1693,7 +1735,7 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext : type.numericType.getValuesSourceType(); if ((operation == FielddataOperation.SEARCH || operation == FielddataOperation.SCRIPT) && hasDocValues()) { - return type.getFieldDataBuilder(name(), valuesSourceType); + return type.getFieldDataBuilder(this, valuesSourceType); } if (operation == FielddataOperation.SCRIPT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java index 2635c1c11be8e..a46a310d0770f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java @@ -216,7 +216,7 @@ public Query rangeQuery( @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, SeqNoDocValuesField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, SeqNoDocValuesField::new, isIndexed()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java index 0d4f5562d3046..1d4f56b02ed74 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java @@ -66,7 +66,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, VersionDocValuesField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, VersionDocValuesField::new, isIndexed()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java 
b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java index 6532abed19044..7b1e20a6cdda3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java @@ -44,7 +44,7 @@ public class SparseVectorFieldMapper extends FieldMapper { static final String ERROR_MESSAGE_7X = "[sparse_vector] field type in old 7.x indices is allowed to " + "contain [sparse_vector] fields, but they cannot be indexed or searched."; - static final String ERROR_MESSAGE_8X = "The [sparse_vector] field type is not supported from 8.0 to 8.10 versions."; + static final String ERROR_MESSAGE_8X = "The [sparse_vector] field type is not supported on indices created on versions 8.0 to 8.10."; static final IndexVersion PREVIOUS_SPARSE_VECTOR_INDEX_VERSION = IndexVersions.V_8_0_0; static final IndexVersion NEW_SPARSE_VECTOR_INDEX_VERSION = IndexVersions.NEW_SPARSE_VECTOR; diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index f406684c50948..be1906ab8d05e 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -83,6 +83,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Executor; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Consumer; @@ -696,7 +697,7 @@ private static IngestPipelinesExecutionResult failAndStoreFor(String index, Exce * @param onCompletion A callback executed once all documents have been processed. Accepts the thread * that ingestion completed on or an exception in the event that the entire operation * has failed. - * @param executorName Which executor the bulk request should be executed on. + * @param executor Which executor the bulk request should be executed on. 
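The shape of this executeBulkRequest API change, in isolation: callers now resolve the Executor once and hand it over, instead of passing a thread-pool name for the service to look up. A minimal sketch with invented names; ExecutorParameterSketch is not the actual call site.

import java.util.concurrent.Executor;

final class ExecutorParameterSketch {
    // Before (sketch): the service resolved the pool itself from a name:
    //   threadPool.executor(executorName).execute(work);
    // After: the caller owns the lookup, the service just executes.
    static void run(Runnable work, Executor executor) {
        executor.execute(work);
    }

    public static void main(String[] args) {
        Executor sameThread = Runnable::run; // stand-in for a real write pool
        run(() -> System.out.println("bulk request processed"), sameThread);
    }
}

Taking Executor instead of a pool name also makes the dependency explicit and the method trivially testable with a same-thread executor.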
*/ public void executeBulkRequest( final int numberOfActionRequests, @@ -706,11 +707,11 @@ public void executeBulkRequest( final TriConsumer onStoreFailure, final BiConsumer onFailure, final BiConsumer onCompletion, - final String executorName + final Executor executor ) { assert numberOfActionRequests > 0 : "numberOfActionRequests must be greater than 0 but was [" + numberOfActionRequests + "]"; - threadPool.executor(executorName).execute(new AbstractRunnable() { + executor.execute(new AbstractRunnable() { @Override public void onFailure(Exception e) { diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java index 946cd97968e22..329f3d704e50b 100644 --- a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java +++ b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java @@ -19,7 +19,7 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return DocumentSizeReporter.EMPTY_INSTANCE; } @@ -42,6 +42,6 @@ public DocumentSizeObserver newFixedSizeDocumentObserver(long normalisedBytesPar /** * @return an instance of a reporter to use when parsing has been completed and indexing successful */ - DocumentSizeReporter getDocumentParsingReporter(); + DocumentSizeReporter getDocumentParsingReporter(String indexName); } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 41e849b4d2ebd..5a33a958646df 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -680,7 +680,7 @@ protected BlobStore getBlobStore() { * maintains single lazy instance of {@link BlobContainer} */ protected BlobContainer blobContainer() { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); if (lifecycle.started() == false) { throw notStartedException(); @@ -705,7 +705,7 @@ protected BlobContainer blobContainer() { * Public for testing. 
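For the renamed assertSnapshotOrStatelessPermittedThreadPool above, the underlying pattern is an allow-list check on the current thread that returns true so it can sit inside an assert. A hedged sketch only; ThreadPoolAssertionSketch assumes pool names are embedded in thread names, which is an invented convention, not the real ThreadPool.assertCurrentThreadPool implementation.

import java.util.Set;

final class ThreadPoolAssertionSketch {
    static boolean assertCurrentThreadPool(Set<String> allowedPools) {
        String threadName = Thread.currentThread().getName();
        boolean allowed = allowedPools.stream().anyMatch(threadName::contains);
        assert allowed : "thread [" + threadName + "] not in allowed pools " + allowedPools;
        return true; // usable as: assert assertCurrentThreadPool(Set.of("snapshot"));
    }
}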
*/ public BlobStore blobStore() { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); BlobStore store = blobStore.get(); if (store == null) { @@ -1994,7 +1994,7 @@ public long getRestoreThrottleTimeInNanos() { return restoreRateLimitingTimeInNanos.count(); } - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // The Stateless plugin adds custom thread pools for object store operations assert ThreadPool.assertCurrentThreadPool( ThreadPool.Names.SNAPSHOT, @@ -3539,7 +3539,7 @@ public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotI @Override public void verify(String seed, DiscoveryNode localNode) { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); if (isReadOnly()) { try { latestIndexBlobId(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 22c967bb2ea14..1263532117ac0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -9,12 +9,9 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.ObjectArrayPriorityQueue; -import org.elasticsearch.common.util.ObjectObjectPagedHashMap; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -22,7 +19,8 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.DelayedBucketReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -204,36 +202,63 @@ int[] getReverseMuls() { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final BucketsQueue queue = new BucketsQueue(reduceContext); - boolean earlyTerminated = false; + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current().compareKey(b.current()) < 0; + } + }; + private boolean earlyTerminated = false; @Override public void accept(InternalAggregation aggregation) { - InternalComposite sortedAgg = (InternalComposite) aggregation; + final InternalComposite sortedAgg = (InternalComposite) aggregation; earlyTerminated |= sortedAgg.earlyTerminated; - for (InternalBucket bucket : sortedAgg.getBuckets()) { - if (queue.add(bucket) == false) { - // if the bucket is not competitive, we can break - // because incoming buckets are sorted - break; - } + if 
(sortedAgg.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(sortedAgg.buckets.iterator())); } } @Override public InternalAggregation get() { - final List result = queue.get(); + InternalBucket lastBucket = null; + final List buckets = new ArrayList<>(); + final List result = new ArrayList<>(); + while (pq.size() > 0) { + IteratorAndCurrent top = pq.top(); + if (lastBucket != null && top.current().compareKey(lastBucket) != 0) { + InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); + buckets.clear(); + result.add(reduceBucket); + if (result.size() >= getSize()) { + break; + } + } + lastBucket = top.current(); + buckets.add(top.current()); + if (top.hasNext()) { + top.next(); + pq.updateTop(); + } else { + pq.pop(); + } + } + if (buckets.size() > 0) { + InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); + result.add(reduceBucket); + } + List reducedFormats = formats; CompositeKey lastKey = null; - if (result.isEmpty() == false) { - InternalBucket lastBucket = result.get(result.size() - 1); + if (result.size() > 0) { + lastBucket = result.get(result.size() - 1); /* Attach the formats from the last bucket to the reduced composite * so that we can properly format the after key. */ reducedFormats = lastBucket.formats; lastKey = lastBucket.getRawKey(); } reduceContext.consumeBucketsAndMaybeBreak(result.size()); - InternalComposite reduced = new InternalComposite( + final InternalComposite reduced = new InternalComposite( name, getSize(), sourceNames, @@ -248,85 +273,9 @@ public InternalAggregation get() { reduced.validateAfterKey(); return reduced; } - - @Override - public void close() { - Releasables.close(queue); - } }; } - private class BucketsQueue implements Releasable { - private final ObjectObjectPagedHashMap> bucketReducers; - private final ObjectArrayPriorityQueue queue; - private final AggregationReduceContext reduceContext; - - private BucketsQueue(AggregationReduceContext reduceContext) { - this.reduceContext = reduceContext; - bucketReducers = new ObjectObjectPagedHashMap<>(getSize(), reduceContext.bigArrays()); - queue = new ObjectArrayPriorityQueue<>(getSize(), reduceContext.bigArrays()) { - @Override - protected boolean lessThan(InternalBucket a, InternalBucket b) { - return b.compareKey(a) < 0; - } - }; - } - - /** adds a bucket to the queue. Return false if the bucket is not competitive, otherwise true.*/ - boolean add(InternalBucket bucket) { - DelayedBucketReducer delayed = bucketReducers.get(bucket.key); - if (delayed == null) { - final InternalBucket out = queue.insertWithOverflow(bucket); - if (out == null) { - // bucket is added - delayed = new DelayedBucketReducer<>(bucket, reduceContext); - } else if (out == bucket) { - // bucket is not competitive - return false; - } else { - // bucket replaces existing bucket - delayed = bucketReducers.remove(out.key); - assert delayed != null; - delayed.reset(bucket); - } - bucketReducers.put(bucket.key, delayed); - } - delayed.accept(bucket); - return true; - } - - /** Return the list of reduced buckets */ - List get() { - final int bucketsSize = (int) bucketReducers.size(); - final InternalBucket[] result = new InternalBucket[bucketsSize]; - for (int i = bucketsSize - 1; i >= 0; i--) { - final InternalBucket bucket = queue.pop(); - assert bucket != null; - /* Use the formats from the bucket because they'll be right to format - * the key. The formats on the InternalComposite doing the reducing are - * just whatever formats make sense for *its* index. 
This can be real - * trouble when the index doing the reducing is unmapped. */ - final var reducedFormats = bucket.formats; - final DelayedBucketReducer reducer = Objects.requireNonNull(bucketReducers.get(bucket.key)); - result[i] = new InternalBucket( - sourceNames, - reducedFormats, - bucket.key, - reverseMuls, - missingOrders, - reducer.getDocCount(), - reducer.getAggregations() - ); - } - return List.of(result); - } - - @Override - public void close() { - Releasables.close(bucketReducers, queue); - } - } - @Override public InternalAggregation finalizeSampling(SamplingContext samplingContext) { return new InternalComposite( @@ -343,6 +292,23 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { ); } + private InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (InternalBucket bucket : buckets) { + reducer.accept(bucket); + } + /* Use the formats from the bucket because they'll be right to format + * the key. The formats on the InternalComposite doing the reducing are + * just whatever formats make sense for *its* index. This can be real + * trouble when the index doing the reducing is unmapped. */ + final var reducedFormats = reducer.getProto().formats; + final long docCount = reducer.getDocCount(); + final InternalAggregations aggs = reducer.getAggregations(); + return new InternalBucket(sourceNames, reducedFormats, reducer.getProto().key, reverseMuls, missingOrders, docCount, aggs); + } + } + @Override public boolean equals(Object obj) { if (this == obj) return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index ce42145f2ceb1..4939c3bc88744 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -8,12 +8,12 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.util.CollectionUtil; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -23,6 +23,8 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -262,6 +264,11 @@ public InternalDateHistogram(StreamInput in) throws IOException { downsampledResultsOffset = false; } buckets = in.readCollectionAsList(stream -> new Bucket(stream, 
keyed, format)); + // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingLong(b -> b.key)); + } } @Override @@ -323,6 +330,71 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); } + private List reduceBuckets(final PriorityQueue> pq, AggregationReduceContext reduceContext) { + int consumeBucketCount = 0; + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + double key = pq.top().current().key; + + do { + final IteratorAndCurrent top = pq.top(); + + if (top.current().key != key) { + // the key changes, reduce what we already buffered and reset the buffer for current buckets + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + reducedBuckets.add(reduced); + } + currentBuckets.clear(); + key = top.current().key; + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert top.current().key > key : "shards must return data sorted by key"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + } + } + } + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + return reducedBuckets; + } + + /** + * Reduce a list of same-keyed buckets (from multiple shards) to a single bucket. This + * requires all buckets to have the same key. 
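The merge these reducers rely on, as a self-contained sketch: every shard responds with buckets sorted by key (hence the re-sort above for streams from the 8.13 transport-version window), so a heap of per-shard cursors emits keys in global order and equal keys arrive consecutively, ready for reduceBucket. KWayMergeSketch uses the JDK PriorityQueue in place of Lucene's, and long keys in place of buckets, purely for illustration.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

final class KWayMergeSketch {

    // Plays the role of IteratorAndCurrent: an iterator plus its current element.
    private static final class Cursor {
        final Iterator<Long> it;
        long current;
        Cursor(Iterator<Long> it) { this.it = it; this.current = it.next(); }
    }

    static List<Long> mergeKeys(List<List<Long>> perShardSortedKeys) {
        PriorityQueue<Cursor> pq = new PriorityQueue<>(Comparator.comparingLong((Cursor c) -> c.current));
        for (List<Long> shard : perShardSortedKeys) {
            if (shard.isEmpty() == false) {
                pq.add(new Cursor(shard.iterator()));
            }
        }
        List<Long> merged = new ArrayList<>();
        while (pq.isEmpty() == false) {
            Cursor top = pq.poll();
            merged.add(top.current); // equal keys surface back-to-back here
            if (top.it.hasNext()) {
                top.current = top.it.next();
                pq.add(top); // re-insert so the heap re-orders the advanced cursor
            }
        }
        return merged;
    }

    public static void main(String[] args) {
        // Three "shards", each sorted by key; key 4 appears on two shards.
        System.out.println(mergeKeys(List.of(List.of(1L, 4L, 9L), List.of(2L, 4L), List.of(3L))));
        // prints [1, 2, 3, 4, 4, 9]
    }
}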
+ */ + private Bucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto().key, reducer.getDocCount(), reducer.getAggregations()); + } + } + private void addEmptyBuckets(List list, AggregationReduceContext reduceContext) { /* * Make sure we have space for the empty buckets we're going to add by @@ -433,31 +505,25 @@ private void iterateEmptyBuckets(List list, ListIterator iter, L @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - - final LongKeyedMultiBucketsAggregatorReducer reducer = new LongKeyedMultiBucketsAggregatorReducer<>( - reduceContext, - size, - minDocCount - ) { + private final PriorityQueue> pq = new PriorityQueue<>(size) { @Override - protected Bucket createBucket(long key, long docCount, InternalAggregations aggregations) { - return InternalDateHistogram.this.createBucket(key, docCount, aggregations); + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current().key < b.current().key; } }; @Override public void accept(InternalAggregation aggregation) { - InternalDateHistogram dateHistogram = (InternalDateHistogram) aggregation; - for (Bucket bucket : dateHistogram.buckets) { - reducer.accept(bucket.key, bucket); + final InternalDateHistogram histogram = (InternalDateHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - List reducedBuckets = reducer.get(); + List reducedBuckets = reduceBuckets(pq, reduceContext); if (reduceContext.isFinalReduce()) { - reducedBuckets.sort(Comparator.comparingLong(b -> b.key)); if (minDocCount == 0) { addEmptyBuckets(reducedBuckets, reduceContext); } @@ -486,11 +552,6 @@ public InternalAggregation get() { getMetadata() ); } - - @Override - public void close() { - Releasables.close(reducer); - } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index 980f11ab0aa61..4ff01c5648486 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -21,6 +21,8 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import 
org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -177,6 +179,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeDouble(minBound); out.writeDouble(maxBound); subAggregations.writeTo(out); + } @Override @@ -240,6 +243,11 @@ public InternalHistogram(StreamInput in) throws IOException { format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); buckets = in.readCollectionAsList(stream -> new Bucket(stream, keyed, format)); + // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingDouble(b -> b.key)); + } } @Override @@ -282,6 +290,69 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); } + private List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { + int consumeBucketCount = 0; + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + double key = pq.top().current().key; + + do { + final IteratorAndCurrent top = pq.top(); + + if (Double.compare(top.current().key, key) != 0) { + // The key changes, reduce what we already buffered and reset the buffer for current buckets. + // Using Double.compare instead of != to handle NaN correctly. 
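A quick demonstration of why the comparison above goes through Double.compare rather than the primitive operators; DoubleCompareDemo is illustrative only.

final class DoubleCompareDemo {
    public static void main(String[] args) {
        System.out.println(Double.NaN == Double.NaN);               // false: NaN never equals itself under ==
        System.out.println(Double.compare(Double.NaN, Double.NaN)); // 0: compare treats NaN as equal to itself
        System.out.println(0.0 == -0.0);                            // true under ==
        System.out.println(Double.compare(0.0, -0.0));              // 1: compare orders -0.0 before 0.0
    }
}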
+ final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + } + currentBuckets.clear(); + key = top.current().key; + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert Double.compare(top.current().key, key) > 0 : "shards must return data sorted by key"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + } + } + } + + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + return reducedBuckets; + } + + private Bucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto().key, reducer.getDocCount(), reducer.getAggregations()); + } + } + private double nextKey(double key) { return round(key + emptyBucketInfo.interval + emptyBucketInfo.interval / 2); } @@ -376,31 +447,25 @@ private void iterateEmptyBuckets(List list, ListIterator iter, D @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - - final LongKeyedMultiBucketsAggregatorReducer reducer = new LongKeyedMultiBucketsAggregatorReducer<>( - reduceContext, - size, - minDocCount - ) { + final PriorityQueue> pq = new PriorityQueue<>(size) { @Override - protected Bucket createBucket(long key, long docCount, InternalAggregations aggregations) { - return InternalHistogram.this.createBucket(NumericUtils.sortableLongToDouble(key), docCount, aggregations); + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return Double.compare(a.current().key, b.current().key) < 0; } }; @Override public void accept(InternalAggregation aggregation) { - InternalHistogram histogram = (InternalHistogram) aggregation; - for (Bucket bucket : histogram.buckets) { - reducer.accept(NumericUtils.doubleToSortableLong(bucket.key), bucket); + final InternalHistogram histogram = (InternalHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - List reducedBuckets = reducer.get(); + List reducedBuckets = reduceBuckets(pq, reduceContext); if (reduceContext.isFinalReduce()) { - reducedBuckets.sort(Comparator.comparingDouble(b -> b.key)); if (minDocCount == 0) { addEmptyBuckets(reducedBuckets, reduceContext); } @@ -418,11 +483,6 @@ public InternalAggregation get() { } return new InternalHistogram(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, format, keyed, getMetadata()); } - - @Override - public void close() { - Releasables.close(reducer); - } }; } diff --git 
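Illustrative aside, not part of the change itself: all of the reducers in this diff share one shape. Each shard response already carries its buckets sorted by key, so the partial reduce becomes a k-way merge: a priority queue holds one cursor per shard response, the smallest current key is inspected, and a run of equal keys is buffered and reduced into a single output bucket. The sketch below shows that pattern with plain JDK types; java.util.PriorityQueue stands in for Lucene's updateTop()-capable PriorityQueue, and a long key with a per-key count stands in for a bucket with its doc count and sub-aggregations. All names in the sketch are invented for the example.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

class SortedBucketMerge {
    // Cursor pairs one shard's iterator with its current (smallest unconsumed) key.
    record Cursor(Iterator<Long> it, long current) {}

    // Merges per-shard sorted key lists, returning [key, count] pairs in key order.
    static List<long[]> merge(List<List<Long>> shardBuckets) {
        PriorityQueue<Cursor> pq = new PriorityQueue<>(Comparator.comparingLong(Cursor::current));
        for (List<Long> keys : shardBuckets) {
            Iterator<Long> it = keys.iterator();
            if (it.hasNext()) {
                pq.add(new Cursor(it, it.next()));
            }
        }
        List<long[]> reduced = new ArrayList<>();
        long key = 0;
        int count = 0;              // stands in for reducing the buffered buckets
        boolean first = true;
        while (pq.isEmpty() == false) {
            Cursor top = pq.poll(); // java.util's PQ has no updateTop(), so poll and re-add
            if (first || top.current() != key) {
                if (first == false) {
                    reduced.add(new long[] { key, count }); // key changed: flush the group
                }
                key = top.current();
                count = 0;
                first = false;
            }
            count++;                // "reduce" this shard's bucket into the group
            if (top.it().hasNext()) {
                pq.add(new Cursor(top.it(), top.it().next()));
            }
        }
        if (first == false) {
            reduced.add(new long[] { key, count });
        }
        return reduced;
    }

    public static void main(String[] args) {
        List<List<Long>> shards = List.of(List.of(1L, 3L, 5L), List.of(1L, 2L, 5L));
        for (long[] kv : merge(shards)) {
            System.out.println(kv[0] + " -> " + kv[1]); // 1 -> 2, 2 -> 1, 3 -> 1, 5 -> 2
        }
    }
}

Compared with the removed hash-map-based reducers, this keeps memory proportional to the number of shard responses rather than the number of distinct keys, and it emits buckets already in key order, which is why the explicit reducedBuckets.sort(...) calls could be deleted above.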
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java
index 27a79095eb49d..05944b75d06d5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java
@@ -8,11 +8,10 @@
 package org.elasticsearch.search.aggregations.bucket.histogram;

-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.PriorityQueue;
+import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.util.LongObjectPagedHashMap;
-import org.elasticsearch.core.Releasables;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationReduceContext;
 import org.elasticsearch.search.aggregations.AggregatorReducer;
@@ -21,6 +20,7 @@
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
 import org.elasticsearch.search.aggregations.KeyComparable;
 import org.elasticsearch.search.aggregations.bucket.BucketReducer;
+import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent;
 import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
 import org.elasticsearch.search.aggregations.support.SamplingContext;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -260,6 +260,11 @@ public InternalVariableWidthHistogram(StreamInput in) throws IOException {
         format = in.readNamedWriteable(DocValueFormat.class);
         buckets = in.readCollectionAsList(stream -> new Bucket(stream, format));
         targetNumBuckets = in.readVInt();
+        // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort
+        if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) {
+            // list is mutable by #readCollectionAsList contract
+            buckets.sort(Comparator.comparingDouble(b -> b.centroid));
+        }
     }

     @Override
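Illustrative aside: this constructor hunk (like the InternalHistogram one above) re-sorts buckets only when the response was serialized by a node in the window where partial reduces no longer guaranteed key order (per the comment, starting in 8.13) but before senders started sorting again; TransportVersion#between(lower, upper) is lower-inclusive and upper-exclusive. A minimal sketch of that gating pattern follows, with plain ints standing in for the TransportVersions constants (the names and numbers are made up for illustration):

import java.util.Comparator;
import java.util.List;

class WireCompatSort {
    // Hypothetical stand-ins for the two transport version constants used above.
    static final int ORDER_CHANGED = 8_600_000; // first version that may send unsorted buckets
    static final int SORTED_AGAIN = 8_620_000;  // first version whose senders sort again

    // Lower-inclusive, upper-exclusive: only responses from the affected window are
    // re-sorted, restoring the "shards return data sorted by key" merge precondition.
    static void maybeSort(List<Double> bucketKeys, int wireVersion) {
        if (wireVersion >= ORDER_CHANGED && wireVersion < SORTED_AGAIN) {
            bucketKeys.sort(Comparator.naturalOrder());
        }
    }
}

The in-place sort is safe here because, as the diff's comment notes, readCollectionAsList returns a mutable list by contract.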
@@ -309,21 +314,62 @@ public Number getKey(MultiBucketsAggregation.Bucket bucket) {
     }

     private Bucket reduceBucket(List<Bucket> buckets, AggregationReduceContext context) {
-        long docCount = 0;
+        assert buckets.isEmpty() == false;
         double min = Double.POSITIVE_INFINITY;
         double max = Double.NEGATIVE_INFINITY;
         double sum = 0;
-        for (InternalVariableWidthHistogram.Bucket bucket : buckets) {
-            docCount += bucket.docCount;
-            min = Math.min(min, bucket.bounds.min);
-            max = Math.max(max, bucket.bounds.max);
-            sum += bucket.docCount * bucket.centroid;
-        }
-        final List<InternalAggregations> aggregations = new BucketAggregationList<>(buckets);
-        final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context);
-        final double centroid = sum / docCount;
-        final Bucket.BucketBounds bounds = new Bucket.BucketBounds(min, max);
-        return new Bucket(centroid, bounds, docCount, format, aggs);
+        try (BucketReducer<Bucket> reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) {
+            for (Bucket bucket : buckets) {
+                min = Math.min(min, bucket.bounds.min);
+                max = Math.max(max, bucket.bounds.max);
+                sum += bucket.docCount * bucket.centroid;
+                reducer.accept(bucket);
+            }
+            final double centroid = sum / reducer.getDocCount();
+            final Bucket.BucketBounds bounds = new Bucket.BucketBounds(min, max);
+            return new Bucket(centroid, bounds, reducer.getDocCount(), format, reducer.getAggregations());
+        }
+    }
+
+    public List<Bucket> reduceBuckets(PriorityQueue<IteratorAndCurrent<Bucket>> pq, AggregationReduceContext reduceContext) {
+        List<Bucket> reducedBuckets = new ArrayList<>();
+        if (pq.size() > 0) {
+            double key = pq.top().current().centroid();
+            // list of buckets coming from different shards that have the same key
+            final List<Bucket> currentBuckets = new ArrayList<>();
+            do {
+                IteratorAndCurrent<Bucket> top = pq.top();
+
+                if (Double.compare(top.current().centroid(), key) != 0) {
+                    // The key changes, reduce what we already buffered and reset the buffer for current buckets.
+                    final Bucket reduced = reduceBucket(currentBuckets, reduceContext);
+                    reduceContext.consumeBucketsAndMaybeBreak(1);
+                    reducedBuckets.add(reduced);
+                    currentBuckets.clear();
+                    key = top.current().centroid();
+                }
+
+                currentBuckets.add(top.current());
+
+                if (top.hasNext()) {
+                    Bucket prev = top.current();
+                    top.next();
+                    assert top.current().compareKey(prev) >= 0 : "shards must return data sorted by centroid";
+                    pq.updateTop();
+                } else {
+                    pq.pop();
+                }
+            } while (pq.size() > 0);
+
+            if (currentBuckets.isEmpty() == false) {
+                final Bucket reduced = reduceBucket(currentBuckets, reduceContext);
+                reduceContext.consumeBucketsAndMaybeBreak(1);
+                reducedBuckets.add(reduced);
+            }
+        }
+
+        mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext);
+        return reducedBuckets;
     }

     static class BucketRange {
@@ -479,42 +525,24 @@ private static void adjustBoundsForOverlappingBuckets(List<Bucket> buckets) {
     @Override
     protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) {
         return new AggregatorReducer() {
-
-            final LongObjectPagedHashMap<ReducerAndExtraInfo> bucketsReducer = new LongObjectPagedHashMap<>(
-                getBuckets().size(),
-                reduceContext.bigArrays()
-            );
+            private final PriorityQueue<IteratorAndCurrent<Bucket>> pq = new PriorityQueue<>(size) {
+                @Override
+                protected boolean lessThan(IteratorAndCurrent<Bucket> a, IteratorAndCurrent<Bucket> b) {
+                    return Double.compare(a.current().centroid, b.current().centroid) < 0;
+                }
+            };

             @Override
             public void accept(InternalAggregation aggregation) {
-                InternalVariableWidthHistogram histogram = (InternalVariableWidthHistogram) aggregation;
-                for (Bucket bucket : histogram.getBuckets()) {
-                    long key = NumericUtils.doubleToSortableLong(bucket.centroid());
-                    ReducerAndExtraInfo reducer = bucketsReducer.get(key);
-                    if (reducer == null) {
-                        reducer = new ReducerAndExtraInfo(new BucketReducer<>(bucket, reduceContext, size));
-                        bucketsReducer.put(key, reducer);
-                        reduceContext.consumeBucketsAndMaybeBreak(1);
-                    }
-                    reducer.min[0] = Math.min(reducer.min[0], bucket.bounds.min);
-                    reducer.max[0] = Math.max(reducer.max[0], bucket.bounds.max);
-                    reducer.sum[0] += bucket.docCount * bucket.centroid;
-                    reducer.reducer.accept(bucket);
+                final InternalVariableWidthHistogram histogram = (InternalVariableWidthHistogram) aggregation;
+                if (histogram.buckets.isEmpty() == false) {
+                    pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator()));
                 }
             }

             @Override
             public InternalAggregation get() {
-                final List<Bucket> reducedBuckets = new ArrayList<>((int) bucketsReducer.size());
-                bucketsReducer.forEach(entry -> {
-                    final double centroid = entry.value.sum[0] / entry.value.reducer.getDocCount();
-                    final Bucket.BucketBounds bounds = new Bucket.BucketBounds(entry.value.min[0], entry.value.max[0]);
-                    reducedBuckets.add(
-                        new Bucket(centroid, bounds, entry.value.reducer.getDocCount(), format, entry.value.reducer.getAggregations())
-                    );
-                });
-                reducedBuckets.sort(Comparator.comparing(Bucket::centroid));
-                mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext);
+                final List<Bucket> reducedBuckets = reduceBuckets(pq, reduceContext);
                 if (reduceContext.isFinalReduce()) {
                     buckets.sort(Comparator.comparing(Bucket::min));
                     mergeBucketsWithSameMin(reducedBuckets, reduceContext);
@@ -522,21 +550,9 @@ public InternalAggregation get() {
                 }
                 return new InternalVariableWidthHistogram(getName(), reducedBuckets, emptyBucketInfo, targetNumBuckets, format, metadata);
             }
-
-            @Override
-            public void close() {
-                bucketsReducer.forEach(entry -> Releasables.close(entry.value.reducer));
-                Releasables.close(bucketsReducer);
-            }
         };
     }

-    private record ReducerAndExtraInfo(BucketReducer<Bucket> reducer, double[] min, double[] max, double[] sum) {
-        private ReducerAndExtraInfo(BucketReducer<Bucket> reducer) {
-            this(reducer, new double[] { Double.POSITIVE_INFINITY }, new double[] { Double.NEGATIVE_INFINITY }, new double[] { 0 });
-        }
-    }
-
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
         return new InternalVariableWidthHistogram(

diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java
index 3557947bb9ea7..48b11524df792 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java
@@ -9,10 +9,9 @@
 package org.elasticsearch.search.aggregations.bucket.prefix;

 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.PriorityQueue;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.util.ObjectObjectPagedHashMap;
-import org.elasticsearch.core.Releasables;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationReduceContext;
 import org.elasticsearch.search.aggregations.AggregatorReducer;
@@ -21,12 +20,12 @@
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
 import org.elasticsearch.search.aggregations.KeyComparable;
 import org.elasticsearch.search.aggregations.bucket.BucketReducer;
+import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent;
 import org.elasticsearch.xcontent.XContentBuilder;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -225,51 +224,69 @@ protected void doWriteTo(StreamOutput out) throws IOException {
     @Override
     protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) {
         return new AggregatorReducer() {
-            final ObjectObjectPagedHashMap<BytesRef, BucketReducer<Bucket>> buckets = new ObjectObjectPagedHashMap<>(
-                getBuckets().size(),
-                reduceContext.bigArrays()
-            );
+            private final PriorityQueue<IteratorAndCurrent<Bucket>> pq = new PriorityQueue<>(size) {
+                @Override
+                protected boolean lessThan(IteratorAndCurrent<Bucket> a, IteratorAndCurrent<Bucket> b) {
+                    return a.current().key.compareTo(b.current().key) < 0;
+                }
+            };

             @Override
             public void accept(InternalAggregation aggregation) {
                 final InternalIpPrefix ipPrefix = (InternalIpPrefix) aggregation;
-                for (Bucket bucket : ipPrefix.getBuckets()) {
-                    BucketReducer<Bucket> bucketReducer = buckets.get(bucket.key);
-                    if (bucketReducer == null) {
-                        bucketReducer = new BucketReducer<>(bucket, reduceContext, size);
-                        boolean success = false;
-                        try {
-                            buckets.put(bucket.key, bucketReducer);
-                            success = true;
-                        } finally {
-                            if (success == false) {
-                                Releasables.close(bucketReducer);
-                            }
-                        }
-                    }
-                    bucketReducer.accept(bucket);
+                if (ipPrefix.buckets.isEmpty() == false) {
+                    pq.add(new IteratorAndCurrent<>(ipPrefix.buckets.iterator()));
                 }
             }

             @Override
             public InternalAggregation get() {
-                final List<Bucket> reducedBuckets = new ArrayList<>(Math.toIntExact(buckets.size()));
-                buckets.forEach(entry -> {
-                    if (false == reduceContext.isFinalReduce() || entry.value.getDocCount() >= minDocCount) {
-                        reducedBuckets.add(createBucket(entry.value.getProto(), entry.value.getAggregations(), entry.value.getDocCount()));
-                    }
-                });
+                final List<Bucket> reducedBuckets = reduceBuckets(pq, reduceContext);
                 reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size());
-                reducedBuckets.sort(Comparator.comparing(a -> a.key));
                 return new InternalIpPrefix(getName(), format, keyed, minDocCount, reducedBuckets, metadata);
             }
-
-            @Override
-            public void close() {
-                buckets.forEach(entry -> Releasables.close(entry.value));
-                Releasables.close(buckets);
-            }
         };
     }

+    private List<Bucket> reduceBuckets(PriorityQueue<IteratorAndCurrent<Bucket>> pq, AggregationReduceContext reduceContext) {
+        List<Bucket> reducedBuckets = new ArrayList<>();
+        if (pq.size() > 0) {
+            // list of buckets coming from different shards that have the same value
+            List<Bucket> currentBuckets = new ArrayList<>();
+            BytesRef value = pq.top().current().key;
+
+            do {
+                final IteratorAndCurrent<Bucket> top = pq.top();
+                if (top.current().key.equals(value) == false) {
+                    final Bucket reduced = reduceBucket(currentBuckets, reduceContext);
+                    if (false == reduceContext.isFinalReduce() || reduced.getDocCount() >= minDocCount) {
+                        reducedBuckets.add(reduced);
+                    }
+                    currentBuckets.clear();
+                    value = top.current().key;
+                }
+
+                currentBuckets.add(top.current());
+
+                if (top.hasNext()) {
+                    top.next();
+                    assert top.current().key.compareTo(value) > 0
+                        : "shards must return data sorted by value [" + top.current().key + "] and [" + value + "]";
+                    pq.updateTop();
+                } else {
+                    pq.pop();
+                }
+            } while (pq.size() > 0);
+
+            if (currentBuckets.isEmpty() == false) {
+                final Bucket reduced = reduceBucket(currentBuckets, reduceContext);
+                if (false == reduceContext.isFinalReduce() || reduced.getDocCount() >= minDocCount) {
+                    reducedBuckets.add(reduced);
+                }
+            }
+        }
+
+        return reducedBuckets;
+    }

     @Override
@@ -322,6 +339,16 @@ private Bucket createBucket(Bucket prototype, InternalAggregations aggregations,
         );
     }

+    private Bucket reduceBucket(List<Bucket> buckets, AggregationReduceContext context) {
+        assert buckets.isEmpty() == false;
+        try (BucketReducer<Bucket> reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) {
+            for (Bucket bucket : buckets) {
+                reducer.accept(bucket);
+            }
+            return createBucket(reducer.getProto(), reducer.getAggregations(), reducer.getDocCount());
+        }
+    }
+
     @Override
     public List<Bucket> getBuckets() {
         return Collections.unmodifiableList(buckets);

diff --git a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java
index 9668228ac0ec3..544b085a7006d 100644
--- a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java
+++ b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java
@@ -24,6 +24,9 @@
 /**
  * A builder for fixed executors.
+ *
+ * Builds an Executor with a static number of threads, as opposed to {@link ScalingExecutorBuilder}, which dynamically scales the number
+ * of threads in the pool up and down based on request load.
  */
 public final class FixedExecutorBuilder extends ExecutorBuilder<FixedExecutorBuilder.FixedExecutorSettings> {
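Illustrative aside: the javadoc added above, together with the ScalingExecutorBuilder javadoc just below, contrasts the two pool shapes. The following plain-JDK approximation shows the distinction; Elasticsearch's real builders go through EsExecutors and add thread naming, thread-context propagation, and custom queue and rejection behavior, none of which is modeled here.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

class ExecutorShapes {
    // "Fixed": core == max, so exactly `size` threads service a bounded queue;
    // once the queue fills, further submissions are rejected.
    static ThreadPoolExecutor fixed(int size, int queueSize) {
        return new ThreadPoolExecutor(size, size, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<>(queueSize));
    }

    // "Scaling": core < max plus a keep-alive; threads are spawned for bursts of
    // work and idle threads are reaped after the keep-alive once the burst subsides.
    static ThreadPoolExecutor scaling(int core, int max, long keepAliveSeconds) {
        return new ThreadPoolExecutor(core, max, keepAliveSeconds, TimeUnit.SECONDS, new SynchronousQueue<>());
    }
}

The SynchronousQueue is what makes the second pool actually scale: a ThreadPoolExecutor only creates threads beyond the core size when the queue refuses an offer, and a SynchronousQueue never buffers.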
diff --git a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java
index 07504bc5f9d2e..29a7d5df08b7b 100644
--- a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java
+++ b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java
@@ -24,6 +24,10 @@
 /**
  * A builder for scaling executors.
+ *
+ * The {@link #build} method will instantiate a Java {@link ExecutorService} thread pool that starts with the specified minimum number of
+ * threads and then scales up to the specified max number of threads as needed for excess work, scaling back when the burst of activity
+ * stops, as opposed to the {@link FixedExecutorBuilder}, which keeps a fixed number of threads alive.
  */
 public final class ScalingExecutorBuilder extends ExecutorBuilder<ScalingExecutorBuilder.ScalingExecutorSettings> {
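Illustrative aside: once the builders above have produced the pools, callers pick one by its Names constant, as documented in the ThreadPool changes that follow. A minimal, hypothetical usage sketch (the threadPool instance and the work Runnable are assumed to be supplied by the caller):

import java.util.concurrent.ExecutorService;
import org.elasticsearch.threadpool.ThreadPool;

class Dispatch {
    // executor(name) looks up the pool built for that Names constant;
    // GENERIC is the catch-all pool described in the javadoc below.
    static void runInBackground(ThreadPool threadPool, Runnable work) {
        ExecutorService generic = threadPool.executor(ThreadPool.Names.GENERIC);
        generic.execute(work);
    }
}

The lookup is by exact name, so the Names constants (or a name a plugin registered through a custom builder) are the only valid keys.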
diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index 507eff05780b8..ceda140827527 100644
--- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -27,6 +27,7 @@
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.node.ReportingService;
 import org.elasticsearch.telemetry.metric.Instrument;
@@ -59,13 +60,28 @@
 import static java.util.Map.entry;
 import static org.elasticsearch.core.Strings.format;

+/**
+ * Manages all the Java thread pools we create. {@link Names} contains a list of the thread pools, but plugins can dynamically add more
+ * thread pools to instantiate.
+ */
 public class ThreadPool implements ReportingService<ThreadPoolInfo>, Scheduler {

     private static final Logger logger = LogManager.getLogger(ThreadPool.class);

+    /**
+     * List of names that identify Java thread pools that are created in {@link ThreadPool#ThreadPool}.
+     */
     public static class Names {
-        public static final String SAME = "same";
+        /**
+         * All the tasks that do not relate to the purpose of one of the other thread pools should use this thread pool. Try to pick one of
+         * the other more specific thread pools where possible.
+         */
         public static final String GENERIC = "generic";
+        /**
+         * Important management tasks that keep the cluster from falling apart.
+         * This thread pool ensures cluster coordination tasks do not get blocked by less critical tasks and can continue to make progress.
+         * This thread pool also defaults to a single thread, reducing contention on the Coordinator mutex.
+         */
         public static final String CLUSTER_COORDINATION = "cluster_coordination";
         public static final String GET = "get";
         public static final String ANALYZE = "analyze";
@@ -75,6 +91,10 @@ public static class Names {
         public static final String SEARCH_COORDINATION = "search_coordination";
         public static final String AUTO_COMPLETE = "auto_complete";
         public static final String SEARCH_THROTTLED = "search_throttled";
+        /**
+         * Cluster management tasks. Tasks that manage data, and tasks that report on cluster health via statistics etc.
+         * Not a latency sensitive thread pool: some tasks may at times be long-running, and the thread pool size is limited / relatively small.
+         */
         public static final String MANAGEMENT = "management";
         public static final String FLUSH = "flush";
         public static final String REFRESH = "refresh";
@@ -99,9 +119,13 @@ public static class Names {

     public static final String THREAD_POOL_METRIC_NAME_REJECTED = ".threads.rejected.total";

     public enum ThreadPoolType {
+        @Deprecated(forRemoval = true)
+        @UpdateForV9 // no longer used, remove in v9
         DIRECT("direct"),
         FIXED("fixed"),
-        FIXED_AUTO_QUEUE_SIZE("fixed_auto_queue_size"), // TODO: remove in 9.0
+        @Deprecated(forRemoval = true)
+        @UpdateForV9 // no longer used, remove in v9
+        FIXED_AUTO_QUEUE_SIZE("fixed_auto_queue_size"),
         SCALING("scaling");

         private final String type;
@@ -127,7 +151,6 @@ public static ThreadPoolType fromType(String type) {
     }

     public static final Map<String, ThreadPoolType> THREAD_POOL_TYPES = Map.ofEntries(
-        entry(Names.SAME, ThreadPoolType.DIRECT),
         entry(Names.GENERIC, ThreadPoolType.SCALING),
         entry(Names.GET, ThreadPoolType.FIXED),
         entry(Names.ANALYZE, ThreadPoolType.FIXED),
@@ -196,6 +219,13 @@ public Collection<ExecutorBuilder> builders() {
         Setting.Property.NodeScope
     );

+    /**
+     * Defines and builds the many thread pools delineated in {@link Names}.
+     *
+     * @param settings the node settings, used to size and configure the thread pools
+     * @param meterRegistry the telemetry registry, used to report thread pool metrics
+     * @param customBuilders a list of additional thread pool builders that were defined elsewhere (like a Plugin).
+     */
     @SuppressWarnings({ "rawtypes", "unchecked" })
     public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final ExecutorBuilder<?>... customBuilders) {
         assert Node.NODE_NAME_SETTING.exists(settings);
@@ -324,6 +354,7 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex
         threadContext = new ThreadContext(settings);

+        // Now that all the thread pools have been defined, actually build them.
         final Map<String, ExecutorHolder> executors = new HashMap<>();
         for (final Map.Entry<String, ExecutorBuilder> entry : builders.entrySet()) {
             final ExecutorBuilder.ExecutorSettings executorSettings = entry.getValue().getSettings(settings);
@@ -335,16 +366,10 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex
             executors.put(entry.getKey(), executorHolder);
         }

-        executors.put(Names.SAME, new ExecutorHolder(EsExecutors.DIRECT_EXECUTOR_SERVICE, new Info(Names.SAME, ThreadPoolType.DIRECT)));
         this.executors = Map.copyOf(executors);
         this.executors.forEach((k, v) -> instruments.put(k, setupMetrics(meterRegistry, k, v)));
         this.instruments = instruments;
-        final List<Info> infos = executors.values()
-            .stream()
-            .filter(holder -> holder.info.getName().equals("same") == false)
-            .map(holder -> holder.info)
-            .toList();
-        this.threadPoolInfo = new ThreadPoolInfo(infos);
+        this.threadPoolInfo = new ThreadPoolInfo(executors.values().stream().map(holder -> holder.info).toList());
         this.scheduler = Scheduler.initScheduler(settings, "scheduler");
         this.slowSchedulerWarnThresholdNanos = SLOW_SCHEDULER_TASK_WARN_THRESHOLD_SETTING.get(settings).nanos();
         this.cachedTimeThread = new CachedTimeThread(
@@ -481,10 +506,6 @@ public ThreadPoolStats stats() {
         List<ThreadPoolStats.Stats> stats = new ArrayList<>();
         for (ExecutorHolder holder : executors.values()) {
             final String name = holder.info.getName();
-            // no need to have info on "same" thread pool
-            if ("same".equals(name)) {
-                continue;
-            }
             int threads = -1;
             int queue = -1;
             int active = -1;
@@ -909,6 +930,11 @@ void check(long newAbsoluteMillis, long newRelativeNanos) {
         }
     }

+    /**
+     * Holds a thread pool and additional ES information ({@link Info}) about that Java thread pool ({@link ExecutorService}) instance.
+ * + * See {@link Names} for a list of thread pools, though there can be more dynamically added via plugins. + */ static class ExecutorHolder { private final ExecutorService executor; public final Info info; @@ -924,6 +950,9 @@ ExecutorService executor() { } } + /** + * The settings used to create a Java ExecutorService thread pool. + */ public static class Info implements Writeable, ToXContentFragment { private final String name; diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec new file mode 100644 index 0000000000000..b99a15507f742 --- /dev/null +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -0,0 +1 @@ +org.elasticsearch.index.codec.Elasticsearch814Codec diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java new file mode 100644 index 0000000000000..75a1bf8732a4f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.indices.alias; + +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.index.alias.RandomAliasActionsGenerator; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class IndicesAliasesResponseTests extends AbstractWireSerializingTestCase { + public void testMixedModeSerialization() throws IOException { + + // AcknowledgedResponse to IndicesAliasesResponse + // in version before TransportVersions.ALIAS_ACTION_RESULTS + { + var ack = AcknowledgedResponse.of(randomBoolean()); + try (BytesStreamOutput output = new BytesStreamOutput()) { + ack.writeTo(output); + try (StreamInput in = output.bytes().streamInput()) { + in.setTransportVersion(TransportVersions.V_8_12_0); + + var indicesAliasesResponse = new IndicesAliasesResponse(in); + + assertEquals(ack.isAcknowledged(), indicesAliasesResponse.isAcknowledged()); + assertTrue(indicesAliasesResponse.getActionResults().isEmpty()); + assertFalse(indicesAliasesResponse.hasErrors()); + } + } + } + + // IndicesAliasesResponse to AcknowledgedResponse + // out version before TransportVersions.ALIAS_ACTION_RESULTS + { + var indicesAliasesResponse = randomIndicesAliasesResponse(); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setTransportVersion(TransportVersions.V_8_12_0); + + indicesAliasesResponse.writeTo(output); + try (StreamInput in = output.bytes().streamInput()) { + var ack = AcknowledgedResponse.readFrom(in); + assertEquals(ack.isAcknowledged(), indicesAliasesResponse.isAcknowledged()); + } + } + } + } + + @Override + protected Writeable.Reader 
instanceReader() { + return IndicesAliasesResponse::new; + } + + @Override + protected IndicesAliasesResponse createTestInstance() { + return randomIndicesAliasesResponse(); + } + + private static IndicesAliasesResponse randomIndicesAliasesResponse() { + int numActions = between(0, 5); + List results = new ArrayList<>(); + for (int i = 0; i < numActions; ++i) { + results.add(randomIndicesAliasesResult()); + } + return new IndicesAliasesResponse(randomBoolean(), randomBoolean(), results); + } + + @Override + protected IndicesAliasesResponse mutateInstance(IndicesAliasesResponse instance) throws IOException { + switch (between(0, 2)) { + case 0: { + boolean acknowledged = instance.isAcknowledged() == false; + return new IndicesAliasesResponse(acknowledged, instance.hasErrors(), instance.getActionResults()); + } + case 1: { + boolean errors = instance.hasErrors() == false; + return new IndicesAliasesResponse(instance.isAcknowledged(), errors, instance.getActionResults()); + } + default: { + var results = new ArrayList<>(instance.getActionResults()); + if (results.isEmpty()) { + results.add(randomIndicesAliasesResult()); + } else { + results.remove(between(0, results.size() - 1)); + } + return new IndicesAliasesResponse(instance.isAcknowledged(), instance.hasErrors(), results); + } + } + } + + private static IndicesAliasesResponse.AliasActionResult randomIndicesAliasesResult() { + var action = RandomAliasActionsGenerator.randomAliasAction(); + var indices = Arrays.asList(generateRandomStringArray(10, 5, false, false)); + return IndicesAliasesResponse.AliasActionResult.build(indices, action, randomIntBetween(0, 3)); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java index 2226c40b618f4..23395556761f1 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.IndexNotFoundException; @@ -48,7 +49,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpNodeClient; import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Assume; import org.junit.Before; @@ -843,7 +843,7 @@ private BulkOperation newBulkOperation( return new BulkOperation( null, threadPool, - ThreadPool.Names.SAME, + EsExecutors.DIRECT_EXECUTOR_SERVICE, clusterService, request, client, diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index 3057b00553a22..20d826b11c1e7 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -39,6 +39,7 @@ import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; import java.util.function.Consumer; 
import java.util.function.Function; @@ -137,7 +138,7 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index b97e8303a8eb5..52d50b3a23a0d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -47,9 +47,7 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.junit.Before; @@ -57,13 +55,18 @@ import org.mockito.Captor; import org.mockito.MockitoAnnotations; -import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Predicate; @@ -73,6 +76,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -95,6 +99,9 @@ public class TransportBulkActionIngestTests extends ESTestCase { private static final Thread DUMMY_WRITE_THREAD = new Thread(ThreadPool.Names.WRITE); private FeatureService mockFeatureService; + private static final ExecutorService writeExecutor = new NamedDirectExecutorService("write"); + private static final ExecutorService systemWriteExecutor = new NamedDirectExecutorService("system_write"); + /** Services needed by bulk action */ TransportService transportService; ClusterService clusterService; @@ -158,7 +165,7 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { @@ -186,13 +193,95 @@ class TestSingleItemBulkWriteAction extends TransportSingleItemBulkWriteAction shutdownNow() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean isShutdown() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean isTerminated() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean awaitTermination(long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Callable task) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Runnable task, T result) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Runnable task) { 
+ return fail(null, "shutdown not supported"); + } + + @Override + public List> invokeAll(Collection> tasks) { + return null; + } + + @Override + public List> invokeAll(Collection> tasks, long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + + @Override + public T invokeAny(Collection> tasks) { + return fail(null, "shutdown not supported"); + } + + @Override + public T invokeAny(Collection> tasks, long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + } + @Before - public void setupAction() throws IOException { + public void setupAction() { // initialize captors, which must be members to use @Capture because of generics threadPool = mock(ThreadPool.class); + when(threadPool.executor(eq(ThreadPool.Names.WRITE))).thenReturn(writeExecutor); + when(threadPool.executor(eq(ThreadPool.Names.SYSTEM_WRITE))).thenReturn(systemWriteExecutor); MockitoAnnotations.openMocks(this); // setup services that will be called by action - transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); + transportService = mock(TransportService.class); + when(transportService.getThreadPool()).thenReturn(threadPool); clusterService = mock(ClusterService.class); localIngest = true; // setup nodes for local and remote @@ -312,7 +401,7 @@ public void testIngestLocal() throws Exception { redirectHandler.capture(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -360,7 +449,7 @@ public void testSingleItemBulkActionIngestLocal() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -408,7 +497,7 @@ public void testIngestSystemLocal() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.SYSTEM_WRITE) + same(systemWriteExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -567,7 +656,7 @@ private void validatePipelineWithBulkUpsert(@Nullable String indexRequestIndexNa any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); assertEquals(indexRequest1.getPipeline(), "default_pipeline"); assertEquals(indexRequest2.getPipeline(), "default_pipeline"); @@ -617,7 +706,7 @@ public void testDoExecuteCalledTwiceCorrectly() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertFalse(action.indexCreated); // still no index yet, the ingest node failed. 
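Illustrative aside: these test changes follow from replacing thread pool name strings (including the removed "same" pool) with Executor instances; the mocked ThreadPool above hands back the NamedDirectExecutorService doubles, so the verifications can use Mockito's same(writeExecutor) instead of eq(Names.WRITE). A direct executor simply runs each task on the calling thread, which is also what EsExecutors.DIRECT_EXECUTOR_SERVICE provides and what keeps these unit tests single-threaded and deterministic. A JDK-only sketch of the idea:

import java.util.concurrent.Executor;

class DirectExecutorDemo {
    // A direct executor runs each task synchronously on the submitting thread.
    static final Executor DIRECT = Runnable::run;

    public static void main(String[] args) {
        DIRECT.execute(() -> System.out.println("runs on " + Thread.currentThread().getName()));
    }
}

That is also why the NamedDirectExecutorService above can fail() on every lifecycle and submit method: only execute(Runnable) is ever exercised by the code under test.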
@@ -713,7 +802,7 @@ public void testFindDefaultPipelineFromTemplateMatch() { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); } @@ -753,7 +842,7 @@ public void testFindDefaultPipelineFromV2TemplateMatch() { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); } @@ -782,7 +871,7 @@ public void testIngestCallbackExceptionHandled() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); indexRequest1.autoGenerateId(); completionHandler.getValue().accept(Thread.currentThread(), null); @@ -821,7 +910,7 @@ private void validateDefaultPipeline(IndexRequest indexRequest) { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); assertEquals(indexRequest.getPipeline(), "default_pipeline"); completionHandler.getValue().accept(null, exception); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 1a16d9083df55..960397033f602 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -59,12 +59,14 @@ import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.action.bulk.TransportBulkAction.prohibitCustomRoutingOnDataStream; import static org.elasticsearch.cluster.metadata.MetadataCreateDataStreamServiceTests.createDataStream; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.junit.Assume.assumeThat; import static org.mockito.ArgumentMatchers.any; @@ -321,31 +323,45 @@ public void testOnlySystem() { assertFalse(TransportBulkAction.isOnlySystem(buildBulkRequest(mixed), indicesLookup, systemIndices)); } - public void testRejectCoordination() throws Exception { + private void blockWriteThreadPool(CountDownLatch blockingLatch) { + assertThat(blockingLatch.getCount(), greaterThan(0L)); + final var executor = threadPool.executor(ThreadPool.Names.WRITE); + // Add tasks repeatedly until we get an EsRejectedExecutionException which indicates that the threadpool and its queue are full. 
+ expectThrows(EsRejectedExecutionException.class, () -> { + // noinspection InfiniteLoopStatement + while (true) { + executor.execute(() -> safeAwait(blockingLatch)); + } + }); + } + + public void testRejectCoordination() { BulkRequest bulkRequest = new BulkRequest().add(new IndexRequest("index").id("id").source(Collections.emptyMap())); + final var blockingLatch = new CountDownLatch(1); try { - threadPool.startForcingRejections(); + blockWriteThreadPool(blockingLatch); PlainActionFuture future = new PlainActionFuture<>(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); expectThrows(EsRejectedExecutionException.class, future); } finally { - threadPool.stopForcingRejections(); + blockingLatch.countDown(); } } - public void testRejectionAfterCreateIndexIsPropagated() throws Exception { + public void testRejectionAfterCreateIndexIsPropagated() { BulkRequest bulkRequest = new BulkRequest().add(new IndexRequest("index").id("id").source(Collections.emptyMap())); bulkAction.failIndexCreation = randomBoolean(); + final var blockingLatch = new CountDownLatch(1); try { - bulkAction.beforeIndexCreation = threadPool::startForcingRejections; + bulkAction.beforeIndexCreation = () -> blockWriteThreadPool(blockingLatch); PlainActionFuture future = new PlainActionFuture<>(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); expectThrows(EsRejectedExecutionException.class, future); assertTrue(bulkAction.indexCreated); } finally { - threadPool.stopForcingRejections(); + blockingLatch.countDown(); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index cb9bdd1f3a827..09513351652b8 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -50,6 +50,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.Map; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.function.LongSupplier; @@ -140,12 +141,12 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { expected.set(1000000); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executorName, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); } }; } else { @@ -165,13 +166,13 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { long elapsed = spinForAtLeastOneMillisecond(); expected.set(elapsed); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executorName, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); } }; } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java index 2657bdef8c09d..fc9e9f05542c9 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java @@ -200,7 +200,7 @@ public void onFailure(Exception e) { bulkAction.createMissingIndicesAndIndexData( task, bulkRequest, - randomAlphaOfLength(10), + r -> fail("executor is unused"), listener, indicesToAutoCreate, dataStreamsToRollover, diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index f086b52c1b491..d54fcbd8a9e41 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -177,6 +177,8 @@ public void testRollover() { assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + // Irrespective of whether the rollover was performed lazily, rolloverOnWrite should always be set to false after rollover. + assertFalse(rolledDs.rolloverOnWrite()); } public void testRolloverWithConflictingBackingIndexName() { @@ -272,6 +274,8 @@ public void testRolloverFailureStore() { assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size())); + // Ensure that the rolloverOnWrite flag hasn't changed when rolling over a failure store. + assertThat(rolledDs.rolloverOnWrite(), equalTo(ds.rolloverOnWrite())); assertThat(rolledDs.getFailureIndices().size(), equalTo(ds.getFailureIndices().size() + 1)); assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java index 5cc1a7206e7e4..116acf938fcbc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java @@ -83,6 +83,8 @@ public void testIndexMetadataSerialization() throws IOException { IndexMetadataStats indexStats = randomBoolean() ? randomIndexStats(numShard) : null; Double indexWriteLoadForecast = randomBoolean() ? randomDoubleBetween(0.0, 128, true) : null; Long shardSizeInBytesForecast = randomBoolean() ? 
randomLongBetween(1024, 10240) : null; + Map inferenceFields = randomInferenceFields(); + IndexMetadata metadata = IndexMetadata.builder("foo") .settings(indexSettings(numShard, numberOfReplicas).put("index.version.created", 1)) .creationDate(randomLong()) @@ -107,6 +109,7 @@ public void testIndexMetadataSerialization() throws IOException { .stats(indexStats) .indexWriteLoadForecast(indexWriteLoadForecast) .shardSizeInBytesForecast(shardSizeInBytesForecast) + .putInferenceFields(inferenceFields) .build(); assertEquals(system, metadata.isSystem()); @@ -141,6 +144,7 @@ public void testIndexMetadataSerialization() throws IOException { assertEquals(metadata.getStats(), fromXContentMeta.getStats()); assertEquals(metadata.getForecastedWriteLoad(), fromXContentMeta.getForecastedWriteLoad()); assertEquals(metadata.getForecastedShardSizeInBytes(), fromXContentMeta.getForecastedShardSizeInBytes()); + assertEquals(metadata.getInferenceFields(), fromXContentMeta.getInferenceFields()); final BytesStreamOutput out = new BytesStreamOutput(); metadata.writeTo(out); @@ -162,8 +166,9 @@ public void testIndexMetadataSerialization() throws IOException { assertEquals(metadata.getCustomData(), deserialized.getCustomData()); assertEquals(metadata.isSystem(), deserialized.isSystem()); assertEquals(metadata.getStats(), deserialized.getStats()); - assertEquals(metadata.getForecastedWriteLoad(), fromXContentMeta.getForecastedWriteLoad()); - assertEquals(metadata.getForecastedShardSizeInBytes(), fromXContentMeta.getForecastedShardSizeInBytes()); + assertEquals(metadata.getForecastedWriteLoad(), deserialized.getForecastedWriteLoad()); + assertEquals(metadata.getForecastedShardSizeInBytes(), deserialized.getForecastedShardSizeInBytes()); + assertEquals(metadata.getInferenceFields(), deserialized.getInferenceFields()); } } @@ -547,10 +552,34 @@ public void testPartialIndexReceivesDataFrozenTierPreference() { } } + public void testInferenceFieldMetadata() { + Settings.Builder settings = indexSettings(IndexVersion.current(), randomIntBetween(1, 8), 0); + IndexMetadata idxMeta1 = IndexMetadata.builder("test").settings(settings).build(); + assertTrue(idxMeta1.getInferenceFields().isEmpty()); + + Map dynamicFields = randomInferenceFields(); + IndexMetadata idxMeta2 = IndexMetadata.builder(idxMeta1).putInferenceFields(dynamicFields).build(); + assertThat(idxMeta2.getInferenceFields(), equalTo(dynamicFields)); + } + private static Settings indexSettingsWithDataTier(String dataTier) { return indexSettings(IndexVersion.current(), 1, 0).put(DataTier.TIER_PREFERENCE, dataTier).build(); } + public static Map randomInferenceFields() { + Map map = new HashMap<>(); + int numFields = randomIntBetween(0, 5); + for (int i = 0; i < numFields; i++) { + String field = randomAlphaOfLengthBetween(5, 10); + map.put(field, randomInferenceFieldMetadata(field)); + } + return map; + } + + private static InferenceFieldMetadata randomInferenceFieldMetadata(String name) { + return new InferenceFieldMetadata(name, randomIdentifier(), randomSet(1, 5, ESTestCase::randomIdentifier).toArray(String[]::new)); + } + private IndexMetadataStats randomIndexStats(int numberOfShards) { IndexWriteLoad.Builder indexWriteLoadBuilder = IndexWriteLoad.builder(numberOfShards); int numberOfPopulatedWriteLoads = randomIntBetween(0, numberOfShards); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java new file mode 100644 index 
0000000000000..bd4c87be51157 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.equalTo; + +public class InferenceFieldMetadataTests extends AbstractXContentTestCase { + + public void testSerialization() throws IOException { + final InferenceFieldMetadata before = createTestItem(); + final BytesStreamOutput out = new BytesStreamOutput(); + before.writeTo(out); + + final StreamInput in = out.bytes().streamInput(); + final InferenceFieldMetadata after = new InferenceFieldMetadata(in); + + assertThat(after, equalTo(before)); + } + + @Override + protected InferenceFieldMetadata createTestInstance() { + return createTestItem(); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return p -> p.equals(""); // do not add elements at the top-level as any element at this level is parsed as a new inference field + } + + @Override + protected InferenceFieldMetadata doParseInstance(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + parser.nextToken(); + } + assertEquals(XContentParser.Token.FIELD_NAME, parser.currentToken()); + InferenceFieldMetadata inferenceMetadata = InferenceFieldMetadata.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return inferenceMetadata; + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + private static InferenceFieldMetadata createTestItem() { + String name = randomAlphaOfLengthBetween(3, 10); + String inferenceId = randomIdentifier(); + String[] inputFields = generateRandomStringArray(5, 10, false, false); + return new InferenceFieldMetadata(name, inferenceId, inputFields); + } + + public void testNullCtorArgsThrowException() { + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(null, "inferenceId", new String[0])); + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", null, new String[0])); + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", null)); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java index 0901b1190cfc0..3f63875bfc216 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java @@ -8,8 +8,9 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; +import 
org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse.AliasActionResult; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; @@ -19,6 +20,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -156,11 +158,11 @@ public void testMustExist() { // Show that removing non-existing alias with mustExist == true fails final ClusterState finalCS = after; - final ResourceNotFoundException iae = expectThrows( - ResourceNotFoundException.class, + final AliasesNotFoundException iae = expectThrows( + AliasesNotFoundException.class, () -> service.applyAliasActions(finalCS, singletonList(new AliasAction.Remove(index, "test_2", true))) ); - assertThat(iae.getMessage(), containsString("required alias [test_2] does not exist")); + assertThat(iae.getMessage(), containsString("aliases [test_2] missing")); } public void testMultipleIndices() { @@ -690,10 +692,12 @@ public void testAddAndRemoveAliasClusterStateUpdate() throws Exception { String index = randomAlphaOfLength(5); ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index); IndicesAliasesClusterStateUpdateRequest addAliasRequest = new IndicesAliasesClusterStateUpdateRequest( - List.of(new AliasAction.Add(index, "test", null, null, null, null, null)) + List.of(new AliasAction.Add(index, "test", null, null, null, null, null)), + List.of(AliasActionResult.buildSuccess(List.of(index), AliasActions.add().aliases("test").indices(index))) ); IndicesAliasesClusterStateUpdateRequest removeAliasRequest = new IndicesAliasesClusterStateUpdateRequest( - List.of(new AliasAction.Remove(index, "test", true)) + List.of(new AliasAction.Remove(index, "test", true)), + List.of(AliasActionResult.buildSuccess(List.of(index), AliasActions.remove().aliases("test").indices(index))) ); ClusterState after = ClusterStateTaskExecutorUtils.executeAndAssertSuccessful( diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 7a3d48aad13d3..bd4aa0241cd27 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -12,10 +12,11 @@ import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.IntField; +import org.apache.lucene.document.KeywordField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.TransportVersion; @@ -31,6 +32,7 @@ import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.hamcrest.Matchers; 
import java.io.IOException; import java.util.Collections; @@ -43,35 +45,51 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene99Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch814Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService().codec("default"); - assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); + assertEquals( + "Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=0), chunkSize=14336, maxDocsPerChunk=128, blockShift=10)", + codec.storedFieldsFormat().toString() + ); } public void testBestCompression() throws Exception { Codec codec = createCodecService().codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); + assertEquals( + "Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=3), chunkSize=245760, maxDocsPerChunk=2048, blockShift=10)", + codec.storedFieldsFormat().toString() + ); + } + + public void testLegacyDefault() throws Exception { + Codec codec = createCodecService().codec("legacy_default"); + assertThat(codec, Matchers.instanceOf(Lucene99Codec.class)); + assertThat(codec.storedFieldsFormat(), Matchers.instanceOf(Lucene90StoredFieldsFormat.class)); + // Make sure the legacy codec is writable + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(codec))) { + Document doc = new Document(); + doc.add(new KeywordField("string_field", "abc", Field.Store.YES)); + doc.add(new IntField("int_field", 42, Field.Store.YES)); + w.addDocument(doc); + try (DirectoryReader r = DirectoryReader.open(w)) {} + } } - // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene99Codec.Mode expected, Codec actual) throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(null); - iwc.setCodec(actual); - IndexWriter iw = new IndexWriter(dir, iwc); - iw.addDocument(new Document()); - iw.commit(); - iw.close(); - DirectoryReader ir = DirectoryReader.open(dir); - SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); - String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); - assertNotNull(v); - assertEquals(expected, Lucene99Codec.Mode.valueOf(v)); - ir.close(); - dir.close(); + public void testLegacyBestCompression() throws Exception { + Codec codec = createCodecService().codec("legacy_best_compression"); + assertThat(codec, Matchers.instanceOf(Lucene99Codec.class)); + assertThat(codec.storedFieldsFormat(), Matchers.instanceOf(Lucene90StoredFieldsFormat.class)); + // Make sure the legacy codec is writable + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(codec))) { + Document doc = new Document(); + doc.add(new KeywordField("string_field", "abc", Field.Store.YES)); + doc.add(new IntField("int_field", 42, Field.Store.YES)); + w.addDocument(doc); + try (DirectoryReader r = DirectoryReader.open(w)) {} + } } private CodecService createCodecService() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java 
b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java index 4ce20e35869cb..74657842488b5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.codec; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -63,7 +62,7 @@ public class PerFieldMapperCodecTests extends ESTestCase { """; public void testUseBloomFilter() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, randomBoolean(), false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, randomBoolean(), false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); @@ -71,7 +70,7 @@ public void testUseBloomFilter() throws IOException { } public void testUseBloomFilterWithTimestampFieldEnabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); @@ -79,13 +78,13 @@ public void testUseBloomFilterWithTimestampFieldEnabled() throws IOException { } public void testUseBloomFilterWithTimestampFieldEnabled_noTimeSeriesMode() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); } public void testUseBloomFilterWithTimestampFieldEnabled_disableBloomFilter() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, true); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); assertWarnings( @@ -94,28 +93,29 @@ public void testUseBloomFilterWithTimestampFieldEnabled_disableBloomFilter() thr } public void testUseES87TSDBEncodingForTimestampField() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, true); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); } public void testDoNotUseES87TSDBEncodingForTimestampFieldNonTimeSeriesIndex() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, true); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); } public void testEnableES87TSDBCodec() throws IOException { - 
PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); } public void testDisableES87TSDBCodec() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); } - private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeries, boolean disableBloomFilter) throws IOException { + private PerFieldFormatSupplier createFormatSupplier(boolean timestampField, boolean timeSeries, boolean disableBloomFilter) + throws IOException { Settings.Builder settings = Settings.builder(); if (timeSeries) { settings.put(IndexSettings.MODE.getKey(), "time_series"); @@ -140,31 +140,32 @@ private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeri """; mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); } - return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE); } public void testUseES87TSDBEncodingSettingDisabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, true, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeDisabledCodecDisabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeAndCodecEnabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); } - private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean timeSeries, String mapping) throws IOException { + private PerFieldFormatSupplier createFormatSupplier(boolean enableES87TSDBCodec, boolean timeSeries, String mapping) + throws IOException { Settings.Builder settings = Settings.builder(); if (timeSeries) { settings.put(IndexSettings.MODE.getKey(), "time_series"); @@ -173,7 +174,7 @@ private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean tim 
settings.put(IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING.getKey(), enableES87TSDBCodec); MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings.build(), "test"); mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java new file mode 100644 index 0000000000000..1679813ed1340 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; +import org.elasticsearch.index.codec.Elasticsearch814Codec; + +public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { + + private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java new file mode 100644 index 0000000000000..5acdd4f5730e9 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; +import org.elasticsearch.index.codec.Elasticsearch814Codec; + +public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { + + private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index d83c75455292f..726ec8561535e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -317,7 +317,8 @@ public void testDateNanoDocValues() throws IOException { "my_date", IndexNumericFieldData.NumericType.DATE_NANOSECONDS, CoreValuesSourceType.DATE, - DateNanosDocValuesField::new + DateNanosDocValuesField::new, + false ); // Read index and check the doc values DirectoryReader reader = DirectoryReader.open(w); diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java index 20d5fdae5e4cf..f11d3f9b70d23 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java @@ -242,6 +242,11 @@ public LeafNumericFieldData loadDirect(LeafReaderContext context) throws Excepti protected boolean sortRequiresCustomComparator() { return false; } + + @Override + protected boolean isIndexed() { + return false; + } } private static final ScoreFunction RANDOM_SCORE_FUNCTION = new RandomScoreFunction(0, 0, new IndexFieldDataStub()); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 9582a6e76d539..41e865ceb97fb 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -66,7 +66,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -219,7 +218,7 @@ public void testExecuteIndexPipelineDoesNotExist() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertTrue(failure.get()); @@ -1127,7 +1126,7 @@ public String getType() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertTrue(failure.get()); @@ -1172,7 +1171,7 @@ public void testExecuteBulkPipelineDoesNotExist() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, times(1)).accept( argThat(item -> item == 2), @@ -1207,7 +1206,7 @@ public long normalisedBytesParsed() { } @Override - 
public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return null; } @@ -1249,7 +1248,7 @@ public DocumentSizeObserver newFixedSizeDocumentObserver(long normalisedBytesPar (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertThat(wrappedObserverWasUsed.get(), equalTo(2)); assertThat(parsedValueWasUsed.get(), equalTo(2)); @@ -1284,7 +1283,7 @@ public void testExecuteSuccess() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1326,7 +1325,7 @@ public void testDynamicTemplates() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); latch.await(); assertThat(indexRequest.getDynamicTemplates(), equalTo(Map.of("foo", "bar", "foo.bar", "baz"))); @@ -1356,7 +1355,7 @@ public void testExecuteEmptyPipeline() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1419,7 +1418,7 @@ public void testExecutePropagateAllMetadataUpdates() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(any(), any()); verify(failureHandler, never()).accept(any(), any()); @@ -1477,7 +1476,7 @@ public void testExecuteFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); @@ -1535,7 +1534,7 @@ public void testExecuteSuccessWithOnFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(eq(0), any(IngestProcessorException.class)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1587,7 +1586,7 @@ public void testExecuteFailureWithNestedOnFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); @@ -1650,7 +1649,7 @@ public void testBulkRequestExecutionWithFailures() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemErrorHandler, times(numIndexRequests)).accept(anyInt(), argThat(e -> 
e.getCause().equals(error))); @@ -1704,7 +1703,7 @@ public void testExecuteFailureRedirection() throws Exception { redirectHandler, failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); @@ -1761,7 +1760,7 @@ public void testExecuteFailureRedirectionWithNestedOnFailure() throws Exception redirectHandler, failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); @@ -1827,7 +1826,7 @@ public void testBulkRequestExecutionWithRedirectedFailures() throws Exception { requestItemRedirectHandler, requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemRedirectHandler, times(numIndexRequests)).apply(anyInt(), anyString(), argThat(e -> e.getCause().equals(error))); @@ -1888,7 +1887,7 @@ public void testBulkRequestExecution() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemErrorHandler, never()).accept(any(), any()); @@ -2003,7 +2002,7 @@ public String execute() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); { @@ -2083,7 +2082,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterFirstRequestStats = ingestService.stats(); assertThat(afterFirstRequestStats.pipelineStats().size(), equalTo(2)); @@ -2109,7 +2108,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterSecondRequestStats = ingestService.stats(); assertThat(afterSecondRequestStats.pipelineStats().size(), equalTo(2)); @@ -2140,7 +2139,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterThirdRequestStats = ingestService.stats(); assertThat(afterThirdRequestStats.pipelineStats().size(), equalTo(2)); @@ -2172,7 +2171,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterForthRequestStats = ingestService.stats(); assertThat(afterForthRequestStats.pipelineStats().size(), equalTo(2)); @@ -2269,7 +2268,7 @@ public String getDescription() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -2359,7 
+2358,7 @@ public void testCBORParsing() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); } @@ -2439,7 +2438,7 @@ public void testSetsRawTimestamp() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertThat(indexRequest1.getRawTimestamp(), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index 327dc3d4f5fd0..0e4818701c5f5 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -227,7 +227,7 @@ private Repository createRepository() { new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually } }; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java index 6ac538f6c7ce9..96ad3cd5afb22 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java @@ -163,7 +163,8 @@ private void testCase( "price", IndexNumericFieldData.NumericType.DOUBLE, CoreValuesSourceType.NUMERIC, - (dv, n) -> new DelegateDocValuesField(new Doubles(new DoublesSupplier(dv)), n) + (dv, n) -> new DelegateDocValuesField(new Doubles(new DoublesSupplier(dv)), n), + false ); FunctionScoreQuery query = new FunctionScoreQuery( new MatchAllDocsQuery(), diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java index a5371e7b0b00a..39e73837c83ea 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java @@ -65,7 +65,7 @@ public void testDocValueFetcher() throws IOException { when(fieldType.valueFetcher(any(), any())).thenReturn( new DocValueFetcher( DocValueFormat.RAW, - new SortedNumericIndexFieldData("field", IndexNumericFieldData.NumericType.LONG, CoreValuesSourceType.NUMERIC, null) + new SortedNumericIndexFieldData("field", IndexNumericFieldData.NumericType.LONG, CoreValuesSourceType.NUMERIC, null, false) ) ); when(sec.getFieldType(any())).thenReturn(fieldType); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index dafe994b502f0..185f4582e7377 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -2130,7 +2130,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { 
recoverySettings ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo in the test thread } } diff --git a/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java b/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java index 40115e1402495..4f7d900f7cdb8 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java @@ -20,8 +20,7 @@ protected final ThreadPool.Info info(final ThreadPool threadPool, final String n return info; } } - assert "same".equals(name); - return null; + return fail(null, "unknown threadpool name: " + name); } protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final String name) { @@ -30,10 +29,10 @@ protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final S return stats; } } - throw new IllegalArgumentException(name); + return fail(null, "unknown threadpool name: " + name); } - protected final void terminateThreadPoolIfNeeded(final ThreadPool threadPool) throws InterruptedException { + protected final void terminateThreadPoolIfNeeded(final ThreadPool threadPool) { if (threadPool != null) { terminate(threadPool); } diff --git a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 5644e0b613651..b68f3ef76bbac 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -29,7 +29,7 @@ public class UpdateThreadPoolSettingsTests extends ESThreadPoolTestCase { - public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedException { + public void testCorrectThreadPoolTypePermittedInSettings() { String threadPoolName = randomThreadPoolName(); ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); ThreadPool threadPool = null; @@ -41,13 +41,7 @@ public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedExc .build(), MeterRegistry.NOOP ); - ThreadPool.Info info = info(threadPool, threadPoolName); - if (ThreadPool.Names.SAME.equals(threadPoolName)) { - assertNull(info); // we don't report on the "same" thread pool - } else { - // otherwise check we have the expected type - assertEquals(info.getThreadPoolType(), correctThreadPoolType); - } + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), correctThreadPoolType); } finally { terminateThreadPoolIfNeeded(threadPool); } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 4e43cb33111a1..cb70ab8e491cb 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -288,7 +288,7 @@ public class Cluster implements Releasable { @Nullable // null means construct a list from all the current nodes private List<TransportAddress> seedHostsList; - Cluster(int initialNodeCount) { + public Cluster(int initialNodeCount) { this(initialNodeCount, true, Settings.EMPTY); } @@ 
-364,7 +364,13 @@ List<ClusterNode> addNodes(int newNodesCount) { return addedNodes; } - int size() { + public static void becomeCandidate(ClusterNode node, String reason) { + synchronized (node.coordinator.mutex) { + node.coordinator.becomeCandidate(reason); + } + } + + public int size() { return clusterNodes.size(); } @@ -760,7 +766,7 @@ private void stabilise(long stabilisationDurationMillis, boolean expectIdleJoinV } } - void bootstrapIfNecessary() { + public void bootstrapIfNecessary() { if (clusterNodes.stream().allMatch(ClusterNode::isNotUsefullyBootstrapped)) { assertThat("setting initial configuration may fail with disconnected nodes", disconnectedNodes, empty()); assertThat("setting initial configuration may fail with blackholed nodes", blackholedNodes, empty()); @@ -773,7 +779,7 @@ void bootstrapIfNecessary() { } } - void runFor(long runDurationMillis, String description) { + public void runFor(long runDurationMillis, String description) { final long endTime = deterministicTaskQueue.getCurrentTimeMillis() + runDurationMillis; logger.info("--> runFor({}ms) running until [{}ms]: {}", runDurationMillis, endTime, description); @@ -856,7 +862,7 @@ ClusterNode getAnyNode() { return getAnyNodeExcept(); } - ClusterNode getAnyNodeExcept(ClusterNode... clusterNodesToExclude) { + public ClusterNode getAnyNodeExcept(ClusterNode... clusterNodesToExclude) { List<ClusterNode> filteredNodes = getAllNodesExcept(clusterNodesToExclude); assert filteredNodes.isEmpty() == false; return randomFrom(filteredNodes); @@ -956,7 +962,7 @@ public final class ClusterNode { private static final Logger logger = LogManager.getLogger(ClusterNode.class); private final int nodeIndex; - Coordinator coordinator; + public Coordinator coordinator; private final DiscoveryNode localNode; final CoordinationState.PersistedState persistedState; final Settings nodeSettings; @@ -1388,7 +1394,7 @@ public void onFailure(Exception e) { }); } - AckCollector submitUpdateTask( + public AckCollector submitUpdateTask( String source, UnaryOperator<ClusterState> clusterStateUpdate, CoordinatorTestClusterStateUpdateTask taskListener @@ -1460,7 +1466,7 @@ void onDisconnectEventFrom(ClusterNode clusterNode) { transportService.disconnectFromNode(clusterNode.localNode); } - ClusterState getLastAppliedClusterState() { + public ClusterState getLastAppliedClusterState() { return clusterApplierService.state(); }
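[Editor's note] The visibility changes above exist so coordination tests outside this package can drive the deterministic cluster harness directly. A hedged sketch of such a caller, using only members this diff makes public; the subclass name, test name, and durations are ours:

    import org.elasticsearch.cluster.coordination.AbstractCoordinatorTestCase;

    public class LeaderFailoverTests extends AbstractCoordinatorTestCase {
        public void testLeaderFailover() {
            try (Cluster cluster = new Cluster(3)) {                  // constructor made public above
                cluster.bootstrapIfNecessary();                        // made public above
                cluster.runFor(10_000, "initial election");           // made public above
                ClusterNode node = cluster.getAnyNodeExcept();         // any node, no exclusions
                Cluster.becomeCandidate(node, "simulated failover");   // new static helper added above
                cluster.runFor(10_000, "re-election");
            }
        }
    }

diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index c83caa617e16e..e2b03c6b81af3 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -729,6 +729,7 @@ public static IndicesService mockIndicesServices(MappingLookup mappingLookup) th Mapping mapping = new Mapping(root, new MetadataFieldMapper[0], null); DocumentMapper documentMapper = mock(DocumentMapper.class); when(documentMapper.mapping()).thenReturn(mapping); + when(documentMapper.mappers()).thenReturn(MappingLookup.EMPTY); when(documentMapper.mappingSource()).thenReturn(mapping.toCompressedXContent()); RoutingFieldMapper routingFieldMapper = mock(RoutingFieldMapper.class); when(routingFieldMapper.required()).thenReturn(false); diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java 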
b/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java index e89a6c8a84bf7..1fac5a9917807 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java @@ -379,7 +379,7 @@ public ExecutorService generic() { @Override public ExecutorService executor(String name) { - return Names.SAME.equals(name) ? EsExecutors.DIRECT_EXECUTOR_SERVICE : forkingExecutor; + return forkingExecutor; } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 09c6eed08bf28..620db8dc83510 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -10,7 +10,6 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; @@ -43,6 +42,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.codec.PerFieldMapperCodec; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -243,7 +243,7 @@ protected static void withLuceneIndex( CheckedConsumer<DirectoryReader, IOException> test ) throws IOException { IndexWriterConfig iwc = new IndexWriterConfig(IndexShard.buildIndexAnalyzer(mapperService)).setCodec( - new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) + new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) ); try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc)) { builder.accept(iw);
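[Editor's note] Test indexing now defaults to the ZSTD-backed stored-fields format. A hedged sketch of what the named codecs resolve to, with values taken from the CodecTests assertions earlier in this diff; construction of `codecService` is elided (see createCodecService() in CodecTests), and the sketch class name is ours:

    import org.apache.lucene.codecs.Codec;
    import org.elasticsearch.index.codec.CodecService;

    final class CodecNamesSketch {
        static void printStoredFieldsFormats(CodecService codecService) {
            Codec dflt = codecService.codec("default");                    // ZSTD level 0, 14336-byte chunks
            Codec best = codecService.codec("best_compression");           // ZSTD level 3, 245760-byte chunks
            Codec legacyDefault = codecService.codec("legacy_default");    // Lucene99 with Lucene90 stored fields
            Codec legacyBest = codecService.codec("legacy_best_compression");
            System.out.println(dflt.storedFieldsFormat() + " vs " + legacyDefault.storedFieldsFormat());
            System.out.println(best.storedFieldsFormat() + " vs " + legacyBest.storedFieldsFormat());
        }
    }

diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 29b74478bec6b..4c1980fb1f673 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -157,11 +157,16 @@ public abstract class ESRestTestCase extends ESTestCase { /** * Convert the entity from a {@link Response} into a map of maps. + * Consumes the underlying HttpEntity, releasing any resources it may be holding. */ public static Map<String, Object> entityAsMap(Response response) throws IOException { return entityAsMap(response.getEntity()); } + /** + * Convert the entity from a {@link HttpEntity} into a map of maps. + * Consumes the underlying HttpEntity, releasing any resources it may be holding. 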
+ */ public static Map<String, Object> entityAsMap(HttpEntity entity) throws IOException { XContentType xContentType = XContentType.fromMediaType(entity.getContentType().getValue()); // EMPTY and THROW are fine here because `.map` doesn't use named x content or deprecation @@ -174,11 +179,14 @@ public static Map<String, Object> entityAsMap(HttpEntity entity) throws IOExcept ) ) { return parser.map(); + } finally { + EntityUtils.consumeQuietly(entity); } } /** * Convert the entity from a {@link Response} into a list of maps. + * Consumes the underlying HttpEntity, releasing any resources it may be holding. */ public static List<Object> entityAsList(Response response) throws IOException { XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue()); @@ -192,6 +200,8 @@ public static List<Object> entityAsList(Response response) throws IOException { ) ) { return parser.list(); + } finally { + EntityUtils.consumeQuietly(response.getEntity()); } } @@ -1603,6 +1613,14 @@ public static Response assertOK(Response response) { return response; } + public static void assertOKAndConsume(Response response) { + try { + assertOK(response); + } finally { + EntityUtils.consumeQuietly(response.getEntity()); + } + } + public static ObjectPath assertOKAndCreateObjectPath(Response response) throws IOException { assertOK(response); return ObjectPath.createFromResponse(response); @@ -1622,9 +1640,14 @@ public static void assertDocCount(RestClient client, String indexName, long docC } public static void assertAcknowledged(Response response) throws IOException { - assertOK(response); - String jsonBody = EntityUtils.toString(response.getEntity()); - assertThat(jsonBody, containsString("\"acknowledged\":true")); + try { + assertOK(response); + String jsonBody = EntityUtils.toString(response.getEntity()); + assertThat(jsonBody, containsString("\"acknowledged\":true")); + } finally { + // if assertOK throws an exception, still release resources + EntityUtils.consumeQuietly(response.getEntity()); + } } /** 
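[Editor's note] These helpers now drain the HTTP entity on every path so the pooled connection is always released. A hedged usage sketch; the endpoint and test class are ours, while assertOK, assertOKAndConsume, and client() come from ESRestTestCase as shown above:

    import org.apache.http.util.EntityUtils;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;

    public class EntityConsumptionIT extends ESRestTestCase {
        public void testHealthIsOk() throws Exception {
            Response response = client().performRequest(new Request("GET", "/_cluster/health"));
            try {
                assertOK(response); // throws on non-2xx status
            } finally {
                EntityUtils.consumeQuietly(response.getEntity()); // release the connection even if the assertion fails
            }
            // Or, equivalently, the new one-shot helper added above:
            assertOKAndConsume(client().performRequest(new Request("GET", "/_cluster/health")));
        }
    }

diff --git a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java index ce8e3a2574f3e..e2fa31c31a46f 100644 --- a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java +++ b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java @@ -9,23 +9,14 @@ package org.elasticsearch.threadpool; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Releasable; import org.elasticsearch.node.Node; import org.elasticsearch.telemetry.metric.MeterRegistry; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; public class TestThreadPool extends ThreadPool implements Releasable { - private final CountDownLatch blockingLatch = new CountDownLatch(1); - private volatile boolean returnRejectingExecutor = false; - private volatile ThreadPoolExecutor rejectingExecutor; - public TestThreadPool(String name, ExecutorBuilder<?>... customBuilders) { this(name, Settings.EMPTY, customBuilders); } @@ -34,74 +25,6 @@ public TestThreadPool(String name, Settings settings, ExecutorBuilder<?>... 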
cust super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), name).put(settings).build(), MeterRegistry.NOOP, customBuilders); } - @Override - public ExecutorService executor(String name) { - if (returnRejectingExecutor) { - return rejectingExecutor; - } else { - return super.executor(name); - } - } - - public void startForcingRejections() { - if (rejectingExecutor == null) { - createRejectingExecutor(); - } - returnRejectingExecutor = true; - } - - public void stopForcingRejections() { - returnRejectingExecutor = false; - } - - @Override - public void shutdown() { - blockingLatch.countDown(); - if (rejectingExecutor != null) { - rejectingExecutor.shutdown(); - } - super.shutdown(); - } - - @Override - public void shutdownNow() { - blockingLatch.countDown(); - if (rejectingExecutor != null) { - rejectingExecutor.shutdownNow(); - } - super.shutdownNow(); - } - - private synchronized void createRejectingExecutor() { - if (rejectingExecutor != null) { - return; - } - ThreadFactory factory = EsExecutors.daemonThreadFactory("reject_thread"); - rejectingExecutor = EsExecutors.newFixed( - "rejecting", - 1, - 0, - factory, - getThreadContext(), - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK - ); - - CountDownLatch startedLatch = new CountDownLatch(1); - rejectingExecutor.execute(() -> { - try { - startedLatch.countDown(); - blockingLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); - try { - startedLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - @Override public void close() { ThreadPool.terminate(this, 10, TimeUnit.SECONDS); } diff --git a/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java b/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java index f4677dc603e64..0e79dfa6e1e79 100644 --- a/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java +++ b/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java @@ -443,20 +443,4 @@ public void testThreadPoolSchedulesPeriodicFutureTasks() { assertThat(strings, contains("periodic-0", "periodic-1", "periodic-2")); } - public void testSameExecutor() { - final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(); - final ThreadPool threadPool = taskQueue.getThreadPool(); - final AtomicBoolean executed = new AtomicBoolean(false); - final AtomicBoolean executedNested = new AtomicBoolean(false); - threadPool.generic().execute(() -> { - final var executor = threadPool.executor(ThreadPool.Names.SAME); - assertSame(EsExecutors.DIRECT_EXECUTOR_SERVICE, executor); - executor.execute(() -> assertTrue(executedNested.compareAndSet(false, true))); - assertThat(executedNested.get(), is(true)); - assertTrue(executed.compareAndSet(false, true)); - }); - taskQueue.runAllRunnableTasks(); - assertThat(executed.get(), is(true)); - } - }
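[Editor's note] TestThreadPool drops its built-in rejection forcing. A test that still needs to provoke rejected executions can build the saturated executor itself; this sketch reuses only the calls visible in the removed code above, and the caller owns releasing the latch and shutting the executor down:

    import org.elasticsearch.common.util.concurrent.EsExecutors;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.ThreadFactory;
    import java.util.concurrent.ThreadPoolExecutor;

    final class RejectingExecutorSketch {
        // One worker thread and zero queue capacity: a single blocked task saturates
        // the pool, so every subsequent execute() is rejected.
        static ThreadPoolExecutor startRejecting(ThreadContext threadContext, CountDownLatch releaseLatch) throws InterruptedException {
            ThreadFactory factory = EsExecutors.daemonThreadFactory("reject_thread");
            ThreadPoolExecutor executor = EsExecutors.newFixed(
                "rejecting",
                1,
                0,
                factory,
                threadContext,
                EsExecutors.TaskTrackingConfig.DO_NOT_TRACK
            );
            CountDownLatch startedLatch = new CountDownLatch(1);
            executor.execute(() -> {
                startedLatch.countDown();
                try {
                    releaseLatch.await(); // caller counts this down before shutdown
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
            });
            startedLatch.await(); // the worker is now pinned; new tasks will be rejected
            return executor;
        }
    }

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java index d3a20235e3a38..07be597c7024e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java @@ -14,9 +14,9 @@ import 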
org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -130,7 +130,9 @@ public static void createAnnotationsIndexIfNecessary( client.threadPool().getThreadContext(), ML_ORIGIN, requestBuilder.request(), - finalDelegate.delegateFailureAndWrap((l, r) -> checkMappingsListener.onResponse(r.isAcknowledged())), + finalDelegate.delegateFailureAndWrap( + (l, r) -> checkMappingsListener.onResponse(r.isAcknowledged()) + ), client.admin().indices()::aliases ); }); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java index 016540815fb0a..d4ec7563b868b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -295,7 +296,7 @@ private static void updateWriteAlias( client.threadPool().getThreadContext(), ML_ORIGIN, request, - listener.delegateFailureAndWrap((l, resp) -> l.onResponse(resp.isAcknowledged())), + listener.delegateFailureAndWrap((l, resp) -> l.onResponse(resp.isAcknowledged())), client.admin().indices()::aliases ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java index 466ac58e55bf7..9675d66a183a5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -21,23 +21,17 @@ public GrantRequest() { this.grant = new Grant(); } - public GrantRequest(StreamInput in) throws IOException { - super(in); - this.grant = new Grant(in); - } - public Grant getGrant() { return grant; } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - grant.writeTo(out); + public ActionRequestValidationException 
validate() { + return grant.validate(null); } @Override - public ActionRequestValidationException validate() { - return grant.validate(null); + public final void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java index 998d35267be37..6e827a4a66a5d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java @@ -9,10 +9,11 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.support.MetadataUtils; @@ -39,14 +40,6 @@ public AbstractCreateApiKeyRequest() { this.id = UUIDs.base64UUID(); // because auditing can currently only catch requests but not responses, } - @SuppressWarnings("this-escape") - public AbstractCreateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.id = doReadId(in); - } - - protected abstract String doReadId(StreamInput in) throws IOException; - public String getId() { return id; } @@ -102,4 +95,9 @@ public ActionRequestValidationException validate() { assert refreshPolicy != null : "refresh policy is required"; return validationException; } + + @Override + public final void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java index 34b249d7a8233..0ea772920652b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java @@ -8,13 +8,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; @@ -35,11 +32,6 @@ public BaseBulkUpdateApiKeyRequest( this.ids = Objects.requireNonNull(ids, "API key IDs must not be null"); } - public BaseBulkUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.ids = in.readStringCollectionAsList(); - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = 
super.validate(); @@ -49,12 +41,6 @@ public ActionRequestValidationException validate() { return validationException; } - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringCollection(ids); - } - public List<String> getIds() { return ids; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java index 725a9fb197b07..a3958b31e4716 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java @@ -7,13 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; @@ -32,17 +29,6 @@ public BaseSingleUpdateApiKeyRequest( this.id = Objects.requireNonNull(id, "API key ID must not be null"); } - public BaseSingleUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.id = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - } - public String getId() { return id; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java index e5e3e3f2cabac..a592550484eb1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java @@ -7,10 +7,9 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -43,17 +42,6 @@ public BaseUpdateApiKeyRequest( this.expiration = expiration; } - public BaseUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.roleDescriptors = in.readOptionalCollectionAsList(RoleDescriptor::new); - this.metadata = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED)) { - expiration = in.readOptionalTimeValue(); - } else { - expiration = null; - } - } - public Map<String, Object> getMetadata() { return metadata; } @@ -90,12 +78,7 @@ public ActionRequestValidationException validate() { } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeOptionalCollection(roleDescriptors); - out.writeGenericMap(metadata); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED)) { - out.writeOptionalTimeValue(expiration); - } + public final void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java index 534c874438e3f..eab74d6250aca 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java @@ -7,12 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -41,10 +39,6 @@ public BulkUpdateApiKeyRequest( super(ids, roleDescriptors, metadata, expiration); } - public BulkUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.REST; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java index 32669d5dca447..1d5eb35b99ea7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java @@ -7,18 +7,12 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.action.role.RoleDescriptorRequestValidator; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -55,32 +49,6 @@ public CreateApiKeyRequest( this.metadata = metadata; } - public CreateApiKeyRequest(StreamInput in) throws IOException { - super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - this.name = in.readOptionalString(); - } else { - this.name = in.readString(); - } - this.expiration = in.readOptionalTimeValue(); - this.roleDescriptors = in.readCollectionAsImmutableList(RoleDescriptor::new); - this.refreshPolicy = WriteRequest.RefreshPolicy.readFrom(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) { - this.metadata = in.readGenericMap(); - } else { - this.metadata = null; - } - } - - @Override - protected String doReadId(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - return in.readString(); - } else { - return UUIDs.base64UUID(); - } - } - @Override public ApiKey.Type getType() { 
return ApiKey.Type.REST; @@ -114,23 +82,4 @@ public ActionRequestValidationException validate() { } return validationException; } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeString(id); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - out.writeOptionalString(name); - } else { - out.writeString(name); - } - out.writeOptionalTimeValue(expiration); - out.writeCollection(getRoleDescriptors()); - refreshPolicy.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - out.writeGenericMap(metadata); - } - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java index a375808def6d7..eea96bcbfcdaf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java @@ -8,9 +8,6 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -37,20 +34,6 @@ public CreateCrossClusterApiKeyRequest( this.metadata = metadata; } - public CreateCrossClusterApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.name = in.readString(); - this.expiration = in.readOptionalTimeValue(); - this.roleDescriptors = in.readCollectionAsImmutableList(RoleDescriptor::new); - this.refreshPolicy = WriteRequest.RefreshPolicy.readFrom(in); - this.metadata = in.readGenericMap(); - } - - @Override - protected String doReadId(StreamInput in) throws IOException { - return in.readString(); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.CROSS_CLUSTER; @@ -67,17 +50,6 @@ public ActionRequestValidationException validate() { return super.validate(); } - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - out.writeString(name); - out.writeOptionalTimeValue(expiration); - out.writeCollection(roleDescriptors); - refreshPolicy.writeTo(out); - out.writeGenericMap(metadata); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java index 16a95e349cda8..17d5424b630eb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java @@ -9,11 +9,8 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.xpack.core.security.action.GrantRequest; -import java.io.IOException; import java.util.Objects; /** @@ -30,17 +27,6 @@ public GrantApiKeyRequest() { this.apiKey = new CreateApiKeyRequest(); } - public GrantApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.apiKey = new CreateApiKeyRequest(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - apiKey.writeTo(out); - } - public WriteRequest.RefreshPolicy getRefreshPolicy() { return apiKey.getRefreshPolicy(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java index 9b1e9194d59fd..ffbc5a836633c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java @@ -7,12 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -30,10 +28,6 @@ public UpdateApiKeyRequest( super(roleDescriptors, metadata, expiration, id); } - public UpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.REST; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java index 184ce2c521ce0..04102e571e193 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java @@ -8,11 +8,9 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -29,10 +27,6 @@ public UpdateCrossClusterApiKeyRequest( super(roleDescriptorBuilder == null ? 
null : List.of(roleDescriptorBuilder.build()), metadata, expiration, id); } - public UpdateCrossClusterApiKeyRequest(StreamInput in) throws IOException { - super(in); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.CROSS_CLUSTER; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java index f572c57232b2e..72005bf319c49 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java @@ -8,27 +8,14 @@ package org.elasticsearch.xpack.core.security.action.profile; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.action.GrantRequest; -import java.io.IOException; - public class ActivateProfileRequest extends GrantRequest { public ActivateProfileRequest() { super(); } - public ActivateProfileRequest(StreamInput in) throws IOException { - super(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } - @Override public ActionRequestValidationException validate() { return super.validate(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java index 15e1539570e28..d12cd17d957d4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.index.IndexVersion; @@ -90,8 +90,8 @@ public void testPerformAction() throws Exception { IndicesAliasesRequest request = (IndicesAliasesRequest) invocation.getArguments()[0]; assertThat(request.getAliasActions(), equalTo(expectedAliasActions)); @SuppressWarnings("unchecked") - ActionListener<AcknowledgedResponse> listener = (ActionListener<AcknowledgedResponse>) invocation.getArguments()[1]; - listener.onResponse(AcknowledgedResponse.TRUE); + ActionListener<IndicesAliasesResponse> listener = (ActionListener<IndicesAliasesResponse>) invocation.getArguments()[1]; + listener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return null; }).when(indicesClient).aliases(Mockito.any(), Mockito.any()); @@ -113,7 +113,7 @@ public void testPerformActionFailure() { Mockito.doAnswer((Answer<Void>) invocation -> { @SuppressWarnings("unchecked") - ActionListener<AcknowledgedResponse> listener = (ActionListener<AcknowledgedResponse>) invocation.getArguments()[1]; + ActionListener<IndicesAliasesResponse> listener = (ActionListener<IndicesAliasesResponse>) invocation.getArguments()[1]; listener.onFailure(exception); return null; }).when(indicesClient).aliases(Mockito.any(), Mockito.any()); diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index e7dcc6b441a31..f9fdc0c8362e5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -13,11 +13,11 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.AdminClient; import org.elasticsearch.client.internal.Client; @@ -97,8 +97,8 @@ public void setUpMocks() { ); doAnswer(withResponse(new CreateIndexResponse(true, true, FIRST_CONCRETE_INDEX))).when(indicesAdminClient).create(any(), any()); when(indicesAdminClient.prepareAliases()).thenReturn(new IndicesAliasesRequestBuilder(client)); - doAnswer(withResponse(AcknowledgedResponse.TRUE)).when(indicesAdminClient).aliases(any(), any()); - doAnswer(withResponse(AcknowledgedResponse.TRUE)).when(indicesAdminClient).putTemplate(any(), any()); + doAnswer(withResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS)).when(indicesAdminClient).aliases(any(), any()); + doAnswer(withResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS)).when(indicesAdminClient).putTemplate(any(), any()); clusterAdminClient = mock(ClusterAdminClient.class); doAnswer(invocationOnMock -> { @@ -116,8 +116,9 @@ public void setUpMocks() { when(client.threadPool()).thenReturn(threadPool); when(client.admin()).thenReturn(adminClient); doAnswer(invocationOnMock -> { - ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[2]; - actionListener.onResponse(AcknowledgedResponse.TRUE); + ActionListener actionListener = (ActionListener) invocationOnMock + .getArguments()[2]; + actionListener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return null; }).when(client) .execute( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java deleted file mode 100644 index 0221554963892..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
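Both test hunks above make the same substitution: the mocked aliases(...) call now completes its listener with IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS, the errors-aware constant replacing the bare AcknowledgedResponse.TRUE. A hypothetical fragment showing just that stubbing idiom, with the Mockito statics assumed to be imported:

    IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class);
    doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        ActionListener<IndicesAliasesResponse> listener =
            (ActionListener<IndicesAliasesResponse>) invocation.getArguments()[1];
        // Complete the alias update as acknowledged, with no per-action errors.
        listener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS);
        return null;
    }).when(indicesAdminClient).aliases(any(), any());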
- */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.nullValue; - -public class BulkUpdateApiKeyRequestSerializationTests extends AbstractWireSerializingTestCase { - public void testSerializationBackwardsCompatibility() throws IOException { - BulkUpdateApiKeyRequest testInstance = createTestInstance(); - BulkUpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_11_X); - try { - // Transport is on a version before expiration was introduced, so should always be null - assertThat(deserializedInstance.getExpiration(), nullValue()); - } finally { - dispose(deserializedInstance); - } - } - - @Override - protected BulkUpdateApiKeyRequest createTestInstance() { - final boolean roleDescriptorsPresent = randomBoolean(); - final List descriptorList; - if (roleDescriptorsPresent == false) { - descriptorList = null; - } else { - final int numDescriptors = randomIntBetween(0, 4); - descriptorList = new ArrayList<>(); - for (int i = 0; i < numDescriptors; i++) { - descriptorList.add(new RoleDescriptor("role_" + i, new String[] { "all" }, null, null)); - } - } - - final var ids = randomList(randomInt(5), () -> randomAlphaOfLength(10)); - final var metadata = ApiKeyTests.randomMetadata(); - final TimeValue expiration = ApiKeyTests.randomFutureExpirationTime(); - return new BulkUpdateApiKeyRequest(ids, descriptorList, metadata, expiration); - } - - @Override - protected Writeable.Reader instanceReader() { - return BulkUpdateApiKeyRequest::new; - } - - @Override - protected BulkUpdateApiKeyRequest mutateInstance(BulkUpdateApiKeyRequest instance) throws IOException { - Map metadata = ApiKeyTests.randomMetadata(); - long days = randomValueOtherThan(instance.getExpiration().days(), () -> ApiKeyTests.randomFutureExpirationTime().getDays()); - return new BulkUpdateApiKeyRequest( - instance.getIds(), - instance.getRoleDescriptors(), - metadata, - TimeValue.parseTimeValue(days + "d", null, "expiration") - ); - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java index 7b1f68ab1bbd2..17298c04709a4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java @@ -7,17 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -130,58 
+123,4 @@ public void testRoleDescriptorValidation() { assertThat(ve1.validationErrors().get(5 + i), containsStringIgnoringCase("unknown workflow [" + unknownWorkflows[i] + "]")); } } - - public void testSerialization() throws IOException { - final String name = randomAlphaOfLengthBetween(1, 256); - final TimeValue expiration = randomBoolean() - ? null - : TimeValue.parseTimeValue(randomTimeValue(), "test serialization of create api key"); - final WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values()); - boolean nullOrEmptyRoleDescriptors = randomBoolean(); - final List descriptorList; - if (nullOrEmptyRoleDescriptors) { - descriptorList = randomBoolean() ? null : List.of(); - } else { - final int numDescriptors = randomIntBetween(1, 4); - descriptorList = new ArrayList<>(); - for (int i = 0; i < numDescriptors; i++) { - descriptorList.add(new RoleDescriptor("role_" + i, new String[] { "all" }, null, null)); - } - } - - final CreateApiKeyRequest request = new CreateApiKeyRequest(); - request.setName(name); - request.setExpiration(expiration); - - if (refreshPolicy != request.getRefreshPolicy() || randomBoolean()) { - request.setRefreshPolicy(refreshPolicy); - } - request.setRoleDescriptors(descriptorList); - - boolean testV710Bwc = randomBoolean(); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - if (testV710Bwc) { - out.setTransportVersion(TransportVersions.V_7_9_0); // a version before 7.10 - } - request.writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - if (testV710Bwc) { - in.setTransportVersion(TransportVersions.V_7_9_0); - } - final CreateApiKeyRequest serialized = new CreateApiKeyRequest(in); - assertEquals(name, serialized.getName()); - if (false == testV710Bwc) { - assertEquals(request.getId(), serialized.getId()); // API key id is only preserved after v 7.10 - } - assertEquals(expiration, serialized.getExpiration()); - assertEquals(refreshPolicy, serialized.getRefreshPolicy()); - if (nullOrEmptyRoleDescriptors) { - assertThat(serialized.getRoleDescriptors().isEmpty(), is(true)); - } else { - assertEquals(descriptorList, serialized.getRoleDescriptors()); - } - } - } - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java deleted file mode 100644 index a0a9c9b31b430..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
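The testSerialization method deleted above, like the wire-serialization test classes removed around it, relied on the standard round-trip idiom: write the request to a BytesStreamOutput, read it back through the StreamInput constructor, and compare fields. A condensed sketch of that idiom as a fragment, using names from the deleted test; both the writeTo body and the StreamInput constructor are gone after this patch, which is why the tests go with them:

    CreateApiKeyRequest request = new CreateApiKeyRequest();
    request.setName("my-key");
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        request.writeTo(out); // now throws via TransportAction.localOnly()
        try (StreamInput in = out.bytes().streamInput()) {
            CreateApiKeyRequest copy = new CreateApiKeyRequest(in); // ctor deleted
            assertEquals(request.getName(), copy.getName());
        }
    }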
- */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.NONE; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.WAIT_UNTIL; - -public class CreateCrossClusterApiKeyRequestTests extends AbstractWireSerializingTestCase { - - private String access; - private CrossClusterApiKeyRoleDescriptorBuilder roleDescriptorBuilder; - - @Before - public void init() throws IOException { - access = randomCrossClusterApiKeyAccessField(); - roleDescriptorBuilder = CrossClusterApiKeyRoleDescriptorBuilder.parse(access); - } - - @Override - protected Writeable.Reader instanceReader() { - return CreateCrossClusterApiKeyRequest::new; - } - - @Override - protected CreateCrossClusterApiKeyRequest createTestInstance() { - CreateCrossClusterApiKeyRequest request = new CreateCrossClusterApiKeyRequest( - randomAlphaOfLengthBetween(3, 8), - roleDescriptorBuilder, - randomExpiration(), - randomMetadata() - ); - request.setRefreshPolicy(randomFrom(IMMEDIATE, WAIT_UNTIL, NONE)); - return request; - } - - @Override - protected CreateCrossClusterApiKeyRequest mutateInstance(CreateCrossClusterApiKeyRequest instance) throws IOException { - switch (randomIntBetween(1, 4)) { - case 1 -> { - return new CreateCrossClusterApiKeyRequest( - randomValueOtherThan(instance.getName(), () -> randomAlphaOfLengthBetween(3, 8)), - roleDescriptorBuilder, - instance.getExpiration(), - instance.getMetadata() - ); - } - case 2 -> { - return new CreateCrossClusterApiKeyRequest( - instance.getName(), - CrossClusterApiKeyRoleDescriptorBuilder.parse( - randomValueOtherThan(access, CreateCrossClusterApiKeyRequestTests::randomCrossClusterApiKeyAccessField) - ), - instance.getExpiration(), - instance.getMetadata() - ); - } - case 3 -> { - return new CreateCrossClusterApiKeyRequest( - instance.getName(), - roleDescriptorBuilder, - randomValueOtherThan(instance.getExpiration(), CreateCrossClusterApiKeyRequestTests::randomExpiration), - instance.getMetadata() - ); - } - default -> { - return new CreateCrossClusterApiKeyRequest( - instance.getName(), - roleDescriptorBuilder, - instance.getExpiration(), - randomValueOtherThan(instance.getMetadata(), CreateCrossClusterApiKeyRequestTests::randomMetadata) - ); - } - } - } - - private static TimeValue randomExpiration() { - return randomFrom(TimeValue.timeValueHours(randomIntBetween(1, 999)), null); - } - - private static Map randomMetadata() { - return randomFrom( - randomMap( - 0, - 3, - () -> new Tuple<>( - randomAlphaOfLengthBetween(3, 8), - randomFrom(randomAlphaOfLengthBetween(3, 8), randomInt(), randomBoolean()) - ) - ), - null - ); - } - - private static final List ACCESS_CANDIDATES = List.of(""" - { - "search": [ {"names": ["logs"]} ] - }""", """ - { - "search": [ {"names": ["logs"], "query": "abc" } ] - }""", """ - { - "search": [ {"names": ["logs"], "field_security": {"grant": ["*"], "except": ["private"]} } ] - }""", """ - { - "search": [ {"names": ["logs"], "query": "abc", "field_security": {"grant": ["*"], "except": ["private"]} } ] - }""", """ - { - "replication": [ {"names": ["archive"], 
"allow_restricted_indices": true } ] - }""", """ - { - "replication": [ {"names": ["archive"]} ] - }""", """ - { - "search": [ {"names": ["logs"]} ], - "replication": [ {"names": ["archive"]} ] - }"""); - - public static String randomCrossClusterApiKeyAccessField() { - return randomFrom(ACCESS_CANDIDATES); - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java deleted file mode 100644 index 83d74b7e9d413..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.nullValue; - -public class UpdateApiKeyRequestSerializationTests extends AbstractWireSerializingTestCase { - public void testSerializationBackwardsCompatibility() throws IOException { - UpdateApiKeyRequest testInstance = createTestInstance(); - UpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_11_X); - try { - // Transport is on a version before expiration was introduced, so should always be null - assertThat(deserializedInstance.getExpiration(), nullValue()); - } finally { - dispose(deserializedInstance); - } - } - - @Override - protected UpdateApiKeyRequest createTestInstance() { - final boolean roleDescriptorsPresent = randomBoolean(); - final List descriptorList; - if (roleDescriptorsPresent == false) { - descriptorList = null; - } else { - final int numDescriptors = randomIntBetween(0, 4); - descriptorList = new ArrayList<>(); - for (int i = 0; i < numDescriptors; i++) { - descriptorList.add(new RoleDescriptor("role_" + i, new String[] { "all" }, null, null)); - } - } - - final var id = randomAlphaOfLength(10); - final var metadata = ApiKeyTests.randomMetadata(); - final TimeValue expiration = ApiKeyTests.randomFutureExpirationTime(); - return new UpdateApiKeyRequest(id, descriptorList, metadata, expiration); - } - - @Override - protected Writeable.Reader instanceReader() { - return UpdateApiKeyRequest::new; - } - - @Override - protected UpdateApiKeyRequest mutateInstance(UpdateApiKeyRequest instance) throws IOException { - Map metadata = ApiKeyTests.randomMetadata(); - long days = randomValueOtherThan(instance.getExpiration().days(), () -> ApiKeyTests.randomFutureExpirationTime().getDays()); - return new UpdateApiKeyRequest( - instance.getId(), - instance.getRoleDescriptors(), - metadata, - TimeValue.parseTimeValue(days + "d", null, "expiration") - ); - } - -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java index f9faa2731dcc0..f7a0d1a6d35bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java @@ -8,49 +8,15 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Map; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; public class UpdateCrossClusterApiKeyRequestTests extends ESTestCase { - public void testSerialization() throws IOException { - final var metadata = ApiKeyTests.randomMetadata(); - final TimeValue expiration = ApiKeyTests.randomFutureExpirationTime(); - final CrossClusterApiKeyRoleDescriptorBuilder roleDescriptorBuilder; - if (randomBoolean()) { - roleDescriptorBuilder = CrossClusterApiKeyRoleDescriptorBuilder.parse(randomCrossClusterApiKeyAccessField()); - } else { - roleDescriptorBuilder = null; - } - - final var request = new UpdateCrossClusterApiKeyRequest(randomAlphaOfLength(10), roleDescriptorBuilder, metadata, expiration); - assertThat(request.getType(), is(ApiKey.Type.CROSS_CLUSTER)); - assertThat(request.validate(), nullValue()); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - request.writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - final var serialized = new UpdateCrossClusterApiKeyRequest(in); - assertEquals(request.getId(), serialized.getId()); - assertEquals(request.getRoleDescriptors(), serialized.getRoleDescriptors()); - assertEquals(metadata, serialized.getMetadata()); - assertEquals(expiration, serialized.getExpiration()); - assertEquals(request.getType(), serialized.getType()); - } - } - } - public void testNotEmptyUpdateValidation() { final var request = new UpdateCrossClusterApiKeyRequest(randomAlphaOfLength(10), null, null, null); final ActionRequestValidationException ve = request.validate(); diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 2057518307fc0..d23f1e4b89a8c 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -553,7 +553,10 @@ public void onFailure(Exception e) { fail("downsample index has not been created"); } }); - downsample(sourceIndex, downsampleIndex, config); + + // Downsample with retries, in case the downsampled index is not ready. 
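The added line below wraps the downsample call in ESTestCase.assertBusy, which re-runs the block until it stops throwing or the time budget is exhausted, rethrowing the last failure; the overload without a timeout uses a short default, so the 120-second budget is passed explicitly here. A minimal sketch of the idiom, with a hypothetical operation standing in for the real call:

    // Inside an ESTestCase subclass: retry for up to two minutes.
    assertBusy(() -> {
        operationThatMayTransientlyFail(); // assumed helper, for illustration
    }, 120, TimeUnit.SECONDS);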
+ assertBusy(() -> downsample(sourceIndex, downsampleIndex, config), 120, TimeUnit.SECONDS); + // We must wait until the in-progress downsample ends, otherwise data will not be cleaned up assertBusy(() -> assertTrue("In progress downsample did not complete", downsampleListener.success), 60, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 61e425d4b05dd..0ccef9acba088 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -223,7 +224,7 @@ private static String getSearchAliasName(SearchApplication app) { public void putSearchApplication(SearchApplication app, boolean create, ActionListener listener) { createOrUpdateAlias(app, new ActionListener<>() { @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { + public void onResponse(IndicesAliasesResponse response) { updateSearchApplication(app, create, listener); } @@ -240,7 +241,7 @@ public void onFailure(Exception e) { }); } - private void createOrUpdateAlias(SearchApplication app, ActionListener listener) { + private void createOrUpdateAlias(SearchApplication app, ActionListener listener) { final Metadata metadata = clusterService.state().metadata(); final String searchAliasName = getSearchAliasName(app); @@ -332,14 +333,14 @@ private void removeAlias(String searchAliasName, ActionListener() { @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { - listener.onResponse(AcknowledgedResponse.TRUE); + public void onResponse(IndicesAliasesResponse response) { + listener.onResponse(response); } @Override public void onFailure(Exception e) { if (e instanceof ResourceNotFoundException) { - listener.onResponse(AcknowledgedResponse.TRUE); + listener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); } else { listener.onFailure(e); } diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 668ecec0e393d..86245e1c93e97 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -2,6 +2,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.string-templates' esplugin { name 'x-pack-esql' description 'The plugin that powers ESQL for Elasticsearch' @@ -69,27 +70,50 @@ tasks.named("test").configure { doLast { List signatures = file("${projectDir}/build/testrun/test/temp/esql/functions/signature").list().findAll {it.endsWith("svg")} List types = file("${projectDir}/build/testrun/test/temp/esql/functions/types").list().findAll {it.endsWith("asciidoc")} - int count = signatures == null ? 
0 : signatures.size() + int count = types == null ? 0 : types.size() + Closure readExample = line -> { + line.replaceAll(/read-example::([^\[]+)\[tag=([^,\]]+)(, ?json)?\]/, { + String file = it[1] + String tag = it[2] + boolean isJson = it[3] + String allExamples = new File("${projectDir}/qa/testFixtures/src/main/resources/${file}").text + int start = allExamples.indexOf("tag::${tag}[]") + int end = allExamples.indexOf("end::${tag}[]", start) + if (start < 0 || end < 0) { + throw new IllegalAccessException("can't find example ${file}::${tag}") + } + // Slice out the newlines + start = allExamples.indexOf('\n', start) + 1 + end = allExamples.lastIndexOf('\n', end) + String example = allExamples.substring(start, end) + if (isJson) { + example = example.replace("\"", "\\\"").replace("\n", "\\n") + } + return example; + }) + } if (count == 0) { logger.quiet("ESQL Docs: No function signatures created. Skipping sync.") } else if (count == 1) { - logger.quiet("ESQL Docs: Only updated $signatures and $types, patching them into place") + logger.quiet("ESQL Docs: Only files related to $types, patching them into place") project.sync { from "${projectDir}/build/testrun/test/temp/esql/functions" into "${rootDir}/docs/reference/esql/functions" - include '**/*.asciidoc', '**/*.svg' + include '**/*.asciidoc', '**/*.svg', '**/*.md', '**/*.json' preserve { - include '/*.asciidoc', '**/*.asciidoc', '**/*.svg', 'README.md' + include '/*.asciidoc', '**/*.asciidoc', '**/*.md', '**/*.json', '**/*.svg', 'README.md' } + filter readExample } } else { project.sync { from "${projectDir}/build/testrun/test/temp/esql/functions" into "${rootDir}/docs/reference/esql/functions" - include '**/*.asciidoc', '**/*.svg' + include '**/*.asciidoc', '**/*.svg', '**/*.md', '**/*.json' preserve { include '/*.asciidoc', 'README.md' } + filter readExample } } } @@ -222,3 +246,59 @@ tasks.register("regen") { } } } + +tasks.named("spotlessJava") { dependsOn stringTemplates } +tasks.named('checkstyleMain').configure { + excludes = [ "**/*.java.st" ] +} + +def prop(Type, type, TYPE, BYTES, Array) { + return [ + "Type" : Type, + "type" : type, + "TYPE" : TYPE, + "BYTES" : BYTES, + "Array" : Array, + + "int" : type == "int" ? "true" : "", + "long" : type == "long" ? "true" : "", + "double" : type == "double" ? "true" : "", + "BytesRef" : type == "BytesRef" ? "true" : "", + "boolean" : type == "boolean" ? 
"true" : "", + ] +} + +tasks.named('stringTemplates').configure { + var intProperties = prop("Int", "int", "INT", "Integer.BYTES", "IntArray") + var longProperties = prop("Long", "long", "LONG", "Long.BYTES", "LongArray") + var doubleProperties = prop("Double", "double", "DOUBLE", "Double.BYTES", "DoubleArray") + var bytesRefProperties = prop("BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF", "") + var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Byte.BYTES", "BitArray") + // enrich + File enrichResultBuilderInput = file("src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st") + template { + it.properties = intProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java" + } + template { + it.properties = longProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java" + } + template { + it.properties = doubleProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java" + } + template { + it.properties = bytesRefProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java" + } + template { + it.properties = booleanProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java" + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 43181a344e268..f365a2ed78610 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -223,19 +223,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.BooleanBuilder permi @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition(BooleanBlock block); - @Override BooleanBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 09c436e805d57..32627a0e0d36b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -71,55 +71,6 @@ public BooleanBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - @Override - public BooleanBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((BooleanBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BooleanBlockBuilder appendAllValuesToCurrentPosition(BooleanBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final BooleanVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendBoolean(vector.getBoolean(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendBoolean(block.getBoolean(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public BooleanBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 5f5e1f9caa488..a6c75dbc1122f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -229,19 +229,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.BytesRefBuilder perm @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition(BytesRefBlock block); - @Override BytesRefBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index aed422b0c0104..4ef7ed4084228 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -78,56 +78,6 @@ protected void writeNullValue() { values.append(BytesRefBlock.NULL_VALUE); } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BytesRefBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((BytesRefBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - @Override - public BytesRefBlockBuilder appendAllValuesToCurrentPosition(BytesRefBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - BytesRef scratch = new BytesRef(); - final BytesRefVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendBytesRef(vector.getBytesRef(p, scratch)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendBytesRef(block.getBytesRef(i++, scratch)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public BytesRefBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 27d70caaa18fe..a682c2cba019e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -224,19 +224,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.DoubleBuilder permit @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition(DoubleBlock block); - @Override DoubleBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 427127784869a..5921c2daa9f92 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -71,55 +71,6 @@ public DoubleBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public DoubleBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((DoubleBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - @Override - public DoubleBlockBuilder appendAllValuesToCurrentPosition(DoubleBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final DoubleVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendDouble(vector.getDouble(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendDouble(block.getDouble(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public DoubleBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index a34d50bf6ff55..e9d606b51c6a1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -223,19 +223,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.IntBuilder permits I @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition(IntBlock block); - @Override IntBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index aaf46798fd789..85f943004de29 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -71,55 +71,6 @@ public IntBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public IntBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((IntBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - @Override - public IntBlockBuilder appendAllValuesToCurrentPosition(IntBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final IntVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendInt(vector.getInt(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendInt(block.getInt(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public IntBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 21c3eb4257b8d..3e1c5fcfaac95 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -224,19 +224,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.LongBuilder permits @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition(LongBlock block); - @Override LongBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index 5d8daf306809d..d24ae214da63a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -71,55 +71,6 @@ public LongBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public LongBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((LongBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - @Override - public LongBlockBuilder appendAllValuesToCurrentPosition(LongBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final LongVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendLong(vector.getLong(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendLong(block.getLong(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public LongBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 7fc92da1943ac..0e34eaa68881f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -183,12 +183,6 @@ interface Builder extends BlockLoader.Builder, Releasable { */ Builder endPositionEntry(); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition(Block block); - /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index c2ac99a7c8489..3df75f4bc1c56 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -172,11 +172,6 @@ public Builder copyFrom(Block block, int beginInclusive, int endExclusive) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - return appendNull(); - } - @Override public Block.Builder mvOrdering(MvOrdering mvOrdering) { /* diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index 8d3497a66a2d7..2751cd31fd362 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -149,11 +149,6 @@ public Builder copyFrom(Block block, int beginInclusive, int endExclusive) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - throw new UnsupportedOperationException("DocBlock doesn't support appendBlockAndMerge"); - } - @Override public Block.Builder mvOrdering(MvOrdering mvOrdering) { /* diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java index 8616d7a7e1bc6..fd9dd6a479298 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java @@ -165,11 +165,6 @@ public void close() { blockFactory.adjustBreaker(-ordsSize(ords.length)); } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - throw new UnsupportedOperationException(); - } - @Override public Block.Builder copyFrom(Block block, int beginInclusive, int endExclusive) { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 3850e3da7c796..331a5713fa3d1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -277,19 +277,6 @@ $endif$ @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition($Type$Block block); - @Override $Type$Block build(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 5b432f1c62968..fab3be0be4233 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -113,66 +113,6 @@ $if(BytesRef)$ } $endif$ - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public $Type$BlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition(($Type$Block) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st
index 5b432f1c62968..fab3be0be4233 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st
@@ -113,66 +113,6 @@ $if(BytesRef)$
     }
 $endif$
 
-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public $Type$BlockBuilder appendAllValuesToCurrentPosition(Block block) {
-        if (block.areAllValuesNull()) {
-            return appendNull();
-        }
-        return appendAllValuesToCurrentPosition(($Type$Block) block);
-    }
-
-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public $Type$BlockBuilder appendAllValuesToCurrentPosition($Type$Block block) {
-        final int positionCount = block.getPositionCount();
-        if (positionCount == 0) {
-            return appendNull();
-        }
-        final int totalValueCount = block.getTotalValueCount();
-        if (totalValueCount == 0) {
-            return appendNull();
-        }
-        if (totalValueCount > 1) {
-            beginPositionEntry();
-        }
-$if(BytesRef)$
-        BytesRef scratch = new BytesRef();
-$endif$
-        final $Type$Vector vector = block.asVector();
-        if (vector != null) {
-            for (int p = 0; p < positionCount; p++) {
-$if(BytesRef)$
-                appendBytesRef(vector.getBytesRef(p, scratch));
-$else$
-                append$Type$(vector.get$Type$(p));
-$endif$
-            }
-        } else {
-            for (int p = 0; p < positionCount; p++) {
-                int count = block.getValueCount(p);
-                int i = block.getFirstValueIndex(p);
-                for (int v = 0; v < count; v++) {
-$if(BytesRef)$
-                    appendBytesRef(block.getBytesRef(i++, scratch));
-$else$
-                    append$Type$(block.get$Type$(i++));
-$endif$
-                }
-            }
-        }
-        if (totalValueCount > 1) {
-            endPositionEntry();
-        }
-        return this;
-    }
-
     @Override
     public $Type$BlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) {
         if (block.areAllValuesNull()) {
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java
deleted file mode 100644
index 9c1b02aa74107..0000000000000
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */ - -package org.elasticsearch.compute.data; - -import org.elasticsearch.compute.operator.ComputeTestCase; - -import java.util.ArrayList; -import java.util.List; - -import static org.hamcrest.Matchers.equalTo; - -public class BlockBuilderAppendBlockTests extends ComputeTestCase { - - public void testBasic() { - BlockFactory blockFactory = blockFactory(); - IntBlock src = blockFactory.newIntBlockBuilder(10) - .appendInt(1) - .appendNull() - .beginPositionEntry() - .appendInt(4) - .appendInt(6) - .endPositionEntry() - .appendInt(10) - .appendInt(20) - .appendInt(30) - .appendNull() - .beginPositionEntry() - .appendInt(1) - .endPositionEntry() - .build(); - // copy position by position - try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { - for (int i = 0; i < src.getPositionCount(); i++) { - try (IntBlock filter = src.filter(i)) { - dst.appendAllValuesToCurrentPosition(filter); - } - } - try (IntBlock block = dst.build()) { - assertThat(block, equalTo(src)); - } - } - // copy all block - try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { - try (IntBlock block = dst.appendAllValuesToCurrentPosition(src).build()) { - assertThat(block.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(block, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); - } - } - try (Block dst = randomlyDivideAndMerge(src)) { - assertThat(dst.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(dst, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); - } - } - - public void testRandomNullBlock() { - BlockFactory blockFactory = blockFactory(); - IntBlock.Builder src = blockFactory.newIntBlockBuilder(10); - try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { - src.appendAllValuesToCurrentPosition(nullBlock); - } - src.appendInt(101); - try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { - src.appendAllValuesToCurrentPosition(nullBlock); - } - IntBlock block = src.build(); - assertThat(block.getPositionCount(), equalTo(3)); - assertTrue(block.isNull(0)); - assertThat(block.getInt(1), equalTo(101)); - assertTrue(block.isNull(2)); - try (Block flatten = randomlyDivideAndMerge(block)) { - assertThat(flatten.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(flatten, 0), equalTo(101)); - } - } - - public void testRandom() { - ElementType elementType = randomFrom(ElementType.INT, ElementType.BYTES_REF, ElementType.BOOLEAN); - Block block = BasicBlockTests.randomBlock( - elementType, - randomIntBetween(1, 1024), - randomBoolean(), - 0, - between(1, 16), - 0, - between(0, 16) - ).block(); - - block = randomlyDivideAndMerge(block); - block.close(); - } - - private Block randomlyDivideAndMerge(Block block) { - while (block.getPositionCount() > 1 || randomBoolean()) { - int positionCount = block.getPositionCount(); - int offset = 0; - Block.Builder builder = block.elementType() - .newBlockBuilder(randomIntBetween(1, 100), TestBlockFactory.getNonBreakingInstance()); - List expected = new ArrayList<>(); - while (offset < positionCount) { - int length = randomIntBetween(1, positionCount - offset); - int[] positions = new int[length]; - for (int i = 0; i < length; i++) { - positions[i] = offset + i; - } - offset += length; - Block sub = block.filter(positions); - expected.add(extractAndFlattenBlockValues(sub)); - builder.appendAllValuesToCurrentPosition(sub); - sub.close(); - } - block.close(); - block = builder.build(); - assertThat(block.getPositionCount(), 
equalTo(expected.size())); - for (int i = 0; i < block.getPositionCount(); i++) { - assertThat(BlockUtils.toJavaObject(block, i), equalTo(expected.get(i))); - } - } - return block; - } - - static Object extractAndFlattenBlockValues(Block block) { - List values = new ArrayList<>(); - for (int i = 0; i < block.getPositionCount(); i++) { - Object v = BlockUtils.toJavaObject(block, i); - if (v == null) { - continue; - } - if (v instanceof List l) { - values.addAll(l); - } else { - values.add(v); - } - } - if (values.isEmpty()) { - return null; - } else if (values.size() == 1) { - return values.get(0); - } else { - return values; - } - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java index a2b074c1403a0..4595b26ca27aa 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java @@ -113,12 +113,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public IntBlock build() { return builder.build(); @@ -174,12 +168,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public LongBlock build() { return builder.build(); @@ -235,12 +223,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public DoubleBlock build() { return builder.build(); @@ -296,12 +278,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public BytesRefBlock build() { return builder.build(); @@ -360,12 +336,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public BooleanBlock build() { return builder.build(); diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 9a494f6309997..2f681fc23bf31 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -133,14 +133,12 @@ protected boolean supportsAsync() { private Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { if (supportsAsync()) { - return RestEsqlTestCase.runEsqlAsync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlAsync(requestObject); } else { - return 
RestEsqlTestCase.runEsqlSync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlSync(requestObject); } } - private static final List NO_WARNINGS = List.of(); - public void testCount() throws Exception { { Map result = run("FROM test-local-index,*:test-remote-index | STATS c = COUNT(*)"); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 349954450904d..4d8770a6ff112 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -35,6 +35,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.regex.Pattern; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude; @@ -143,7 +144,11 @@ protected void shouldSkipTest(String testName) throws IOException { protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - Map answer = runEsql(builder.query(testCase.query), testCase.expectedWarnings(false)); + Map answer = runEsql( + builder.query(testCase.query), + testCase.expectedWarnings(false), + testCase.expectedWarningsRegex() + ); var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults); var metadata = answer.get("columns"); @@ -160,12 +165,16 @@ protected final void doTest() throws Throwable { assertResults(expectedColumnsWithValues, actualColumns, actualValues, testCase.ignoreOrder, logger); } - private Map runEsql(RequestObjectBuilder requestObject, List expectedWarnings) throws IOException { + private Map runEsql( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex + ) throws IOException { if (mode == Mode.ASYNC) { assert supportsAsync(); - return RestEsqlTestCase.runEsqlAsync(requestObject, expectedWarnings); + return RestEsqlTestCase.runEsqlAsync(requestObject, expectedWarnings, expectedWarningsRegex); } else { - return RestEsqlTestCase.runEsqlSync(requestObject, expectedWarnings); + return RestEsqlTestCase.runEsqlSync(requestObject, expectedWarnings, expectedWarningsRegex); } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java index e04435b715c99..a670b11c61780 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -144,7 +144,7 @@ public void wipeTestData() throws IOException { public void testNonExistentEnrichPolicy() throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> RestEsqlTestCase.runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris"), List.of()) + () -> RestEsqlTestCase.runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris")) ); assertThat( EntityUtils.toString(re.getResponse().getEntity()), @@ -188,14 +188,12 @@ public void testMatchField_ImplicitFieldsList_WithStats() throws IOException { private Map 
runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { if (mode == Mode.ASYNC) { - return RestEsqlTestCase.runEsqlAsync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlAsync(requestObject); } else { - return RestEsqlTestCase.runEsqlSync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlSync(requestObject); } } - private static final List NO_WARNINGS = List.of(); - @Override protected boolean preserveClusterUponCompletion() { return true; diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index ab288de4ad27d..a2296168c5fc0 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -50,10 +50,10 @@ import java.util.Map; import java.util.Set; import java.util.function.IntFunction; +import java.util.regex.Pattern; import static java.util.Collections.emptySet; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; @@ -76,6 +76,7 @@ public abstract class RestEsqlTestCase extends ESRestTestCase { private static final Logger LOGGER = LogManager.getLogger(RestEsqlTestCase.class); private static final List NO_WARNINGS = List.of(); + private static final List NO_WARNINGS_REGEX = List.of(); private static final String MAPPING_ALL_TYPES; @@ -393,7 +394,7 @@ public void testCSVNoHeaderMode() throws IOException { options.addHeader("Content-Type", mediaType); options.addHeader("Accept", "text/csv; header=absent"); request.setOptions(options); - HttpEntity entity = performRequest(request, List.of()); + HttpEntity entity = performRequest(request, NO_WARNINGS, NO_WARNINGS_REGEX); String actual = Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); assertEquals("keyword0,0\r\n", actual); } @@ -452,7 +453,7 @@ public void testOutOfRangeComparisons() throws IOException { "Line 1:29: evaluation of [" + comparison + "] failed, treating result as null. 
Only first 20 failures recorded.", "Line 1:29: java.lang.IllegalArgumentException: single-value function encountered multi-value" ); - var result = runEsql(query, expectedWarnings, mode); + var result = runEsql(query, expectedWarnings, NO_WARNINGS_REGEX, mode); var values = as(result.get("values"), ArrayList.class); assertThat( @@ -478,7 +479,8 @@ public void testWarningHeadersOnFailedConversions() throws IOException { bulkLoadTestData(count); Request request = prepareRequest(SYNC); - var query = fromIndex() + " | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; + var query = fromIndex() + + " | sort integer asc | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; var mediaType = attachBody(new RequestObjectBuilder().query(query).build(), request); RequestOptions.Builder options = request.getOptions().toBuilder(); @@ -493,7 +495,7 @@ public void testWarningHeadersOnFailedConversions() throws IOException { int expectedWarnings = Math.min(count / 2, 20); var warnings = response.getWarnings(); assertThat(warnings.size(), is(1 + expectedWarnings)); - var firstHeader = "Line 1:36: evaluation of [to_int(case(integer %25 2 == 0, to_str(integer), keyword))] failed, " + var firstHeader = "Line 1:55: evaluation of [to_int(case(integer %25 2 == 0, to_str(integer), keyword))] failed, " + "treating result as null. Only first 20 failures recorded."; assertThat(warnings.get(0), containsString(firstHeader)); for (int i = 1; i <= expectedWarnings; i++) { @@ -660,22 +662,35 @@ private static String expectedTextBody(String format, int count, @Nullable Chara } public Map runEsql(RequestObjectBuilder requestObject) throws IOException { - return runEsql(requestObject, NO_WARNINGS, mode); + return runEsql(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX, mode); } public static Map runEsqlSync(RequestObjectBuilder requestObject) throws IOException { - return runEsqlSync(requestObject, NO_WARNINGS); + return runEsqlSync(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX); } - static Map runEsql(RequestObjectBuilder requestObject, List expectedWarnings, Mode mode) throws IOException { + public static Map runEsqlAsync(RequestObjectBuilder requestObject) throws IOException { + return runEsqlAsync(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX); + } + + static Map runEsql( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex, + Mode mode + ) throws IOException { if (mode == ASYNC) { - return runEsqlAsync(requestObject, expectedWarnings); + return runEsqlAsync(requestObject, expectedWarnings, expectedWarningsRegex); } else { - return runEsqlSync(requestObject, expectedWarnings); + return runEsqlSync(requestObject, expectedWarnings, expectedWarningsRegex); } } - public static Map runEsqlSync(RequestObjectBuilder requestObject, List expectedWarnings) throws IOException { + public static Map runEsqlSync( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex + ) throws IOException { requestObject.build(); Request request = prepareRequest(SYNC); String mediaType = attachBody(requestObject, request); @@ -691,11 +706,15 @@ public static Map runEsqlSync(RequestObjectBuilder requestObject } request.setOptions(options); - HttpEntity entity = performRequest(request, expectedWarnings); + HttpEntity entity = performRequest(request, expectedWarnings, expectedWarningsRegex); return entityToMap(entity, requestObject.contentType()); } - public static Map runEsqlAsync(RequestObjectBuilder 
requestObject, List expectedWarnings) throws IOException { + public static Map runEsqlAsync( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex + ) throws IOException { addAsyncParameters(requestObject); requestObject.build(); Request request = prepareRequest(ASYNC); @@ -729,7 +748,7 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec // no id returned from an async call, must have completed immediately and without keep_on_completion assertThat(requestObject.keepOnCompletion(), either(nullValue()).or(is(false))); assertThat((boolean) json.get("is_running"), is(false)); - assertWarnings(response, expectedWarnings); + assertWarnings(response, expectedWarnings, expectedWarningsRegex); json.remove("is_running"); // remove this to not mess up later map assertions return Collections.unmodifiableMap(json); } else { @@ -738,7 +757,7 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec if ((boolean) json.get("is_running") == false) { // must have completed immediately so keep_on_completion must be true assertThat(requestObject.keepOnCompletion(), is(true)); - assertWarnings(response, expectedWarnings); + assertWarnings(response, expectedWarnings, expectedWarningsRegex); // we already have the results, but let's remember them so that we can compare to async get initialColumns = json.get("columns"); initialValues = json.get("values"); @@ -762,7 +781,7 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec assertEquals(initialValues, result.get("values")); } - assertWarnings(response, expectedWarnings); + assertWarnings(response, expectedWarnings, expectedWarningsRegex); assertDeletable(id); return removeAsyncProperties(result); } @@ -836,7 +855,7 @@ static String runEsqlAsTextWithFormat(RequestObjectBuilder builder, String forma } request.setOptions(options); - HttpEntity entity = performRequest(request, List.of()); + HttpEntity entity = performRequest(request, NO_WARNINGS, NO_WARNINGS_REGEX); return Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); } @@ -869,8 +888,9 @@ private static String attachBody(RequestObjectBuilder requestObject, Request req return mediaType; } - private static HttpEntity performRequest(Request request, List allowedWarnings) throws IOException { - return assertWarnings(performRequest(request), allowedWarnings); + private static HttpEntity performRequest(Request request, List allowedWarnings, List allowedWarningsRegex) + throws IOException { + return assertWarnings(performRequest(request), allowedWarnings, allowedWarningsRegex); } private static Response performRequest(Request request) throws IOException { @@ -883,13 +903,13 @@ private static Response performRequest(Request request) throws IOException { return response; } - private static HttpEntity assertWarnings(Response response, List allowedWarnings) { + private static HttpEntity assertWarnings(Response response, List allowedWarnings, List allowedWarningsRegex) { List warnings = new ArrayList<>(response.getWarnings()); warnings.removeAll(mutedWarnings()); if (shouldLog()) { LOGGER.info("RESPONSE warnings (after muted)={}", warnings); } - assertMap(warnings, matchesList(allowedWarnings)); + EsqlTestUtils.assertWarnings(warnings, allowedWarnings, allowedWarningsRegex); return response.getEntity(); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java 
b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java
index fc8f80a19f09f..e6470e0eb2d05 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java
@@ -43,11 +43,15 @@
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.regex.Pattern;
 
 import static java.util.Collections.emptyList;
 import static org.elasticsearch.test.ESTestCase.randomBoolean;
+import static org.elasticsearch.test.ListMatcher.matchesList;
+import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.xpack.ql.TestUtils.of;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.junit.Assert.assertTrue;
 
 public final class EsqlTestUtils {
 
@@ -244,4 +248,14 @@ public static String randomEnrichCommand(String name, Enrich.Mode mode, String m
         all.addAll(after);
         return String.join(" | ", all);
     }
+
+    public static void assertWarnings(List<String> warnings, List<String> allowedWarnings, List<Pattern> allowedWarningsRegex) {
+        if (allowedWarningsRegex.isEmpty()) {
+            assertMap(warnings.stream().sorted().toList(), matchesList(allowedWarnings.stream().sorted().toList()));
+        } else {
+            for (String warning : warnings) {
+                assertTrue("Unexpected warning: " + warning, allowedWarningsRegex.stream().anyMatch(x -> x.matcher(warning).matches()));
+            }
+        }
+    }
 }
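A usage sketch for the `assertWarnings` helper added above (hypothetical values): when the regex list is non-empty it takes precedence, and each actual warning only has to match one of the supplied patterns, while the exact-match list is ignored.

```java
import java.util.List;
import java.util.regex.Pattern;

// Hypothetical call site for the helper added above.
List<String> actualWarnings = List.of("Line 1:44: java.lang.ArithmeticException: long overflow");
EsqlTestUtils.assertWarnings(
    actualWarnings,
    List.of(),   // exact-match list; ignored because regexes are provided
    List.of(Pattern.compile("Line \\d+:\\d+: java\\.lang\\.ArithmeticException: long overflow"))
);
```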
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md
index fdd52c6aac229..dad5ae2828174 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md
@@ -102,7 +102,7 @@ include::{esql-specs}/floats.csv-spec[tag=sin-result]
 
 What is this asciidoc syntax?
 
-The first section is a source code block for the ES|QL query: 
+The first section is a source code block for the ES|QL query:
 - a [source](https://docs.asciidoctor.org/asciidoc/latest/verbatim/source-blocks/) code block (delimited by `----`)
 - `source.merge.styled,esql` indicates custom syntax highlighting for ES|QL
 
@@ -176,3 +176,44 @@ row a = [true, false, false, true]
 ```
 
 That skips nodes that don't have the `esql.mv_sort` feature.
+
+
+### Warnings
+
+Some queries can return warnings, e.g. for number overflows or when a multi-value is passed to a function
+that does not support it.
+
+Each CSV-SPEC test also has to assert all the expected warnings.
+
+Warnings can be specified as plain text or as a regular expression (but a single test cannot have a mix of both).
+Each warning has to be specified on a single row, between the query and the result, prefixed by `warning:` or `warningRegex:`.
+If multiple warnings are defined, the order is not relevant.
+
+This is an example of how to test a query that returns two warnings:
+
+```csv-spec
+addLongOverflow
+row max = 9223372036854775807 | eval sum = max + 1 | keep sum;
+
+warning:Line 1:44: evaluation of [max + 1] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 1:44: java.lang.ArithmeticException: long overflow
+
+sum:long
+null
+;
+```
+
+The same, using regular expressions:
+
+```csv-spec
+addLongOverflow
+row max = 9223372036854775807 | eval sum = max + 1 | keep sum;
+
+warningRegex:Line \d+:\d+: evaluation of \[max \+ 1\] failed, treating result as null. Only first 20 failures recorded.
+warningRegex:Line \d+:\d+: java.lang.ArithmeticException: long overflow
+
+sum:long
+null
+;
+```
+
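One pitfall when writing `warningRegex:` lines like the ones documented above: regex metacharacters in the expected text, such as the `+` in `max + 1`, must be escaped or the pattern quietly fails to match. A minimal `java.util.regex` illustration (hypothetical snippet, not part of this change):

```java
import java.util.regex.Pattern;

public class WarningRegexDemo {
    public static void main(String[] args) {
        String warning = "Line 1:44: evaluation of [max + 1] failed, treating result as null. Only first 20 failures recorded.";
        // An unescaped '+' quantifies the preceding space instead of matching a literal '+':
        Pattern wrong = Pattern.compile("Line \\d+:\\d+: evaluation of \\[max + 1\\] failed.*");
        // The escaped '\+' matches the literal character:
        Pattern right = Pattern.compile("Line \\d+:\\d+: evaluation of \\[max \\+ 1\\] failed.*");
        System.out.println(wrong.matcher(warning).matches()); // false
        System.out.println(right.matcher(warning).matches()); // true
    }
}
```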
"1986-01-01T00:00:00Z" -| eval hd = auto_bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| eval hd = bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | sort hire_date | keep hire_date, hd; @@ -308,10 +308,10 @@ hire_date:date | hd:date 1985-11-21T00:00:00.000Z | 1985-11-01T00:00:00.000Z ; -autoBucketWeek +bucketWeek#[skip:-8.13.99, reason:BUCKET renamed in 8.14] from employees | where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" -| eval hd = auto_bucket(hire_date, 55, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| eval hd = bucket(hire_date, 55, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | sort hire_date | keep hire_date, hd; @@ -350,10 +350,10 @@ from employees | where birth_date > now() | sort emp_no asc | keep emp_no, birth emp_no:integer | birth_date:date ; -autoBucketYearInAgg#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" -| EVAL bucket = AUTO_BUCKET(hire_date, 5, "1999-01-01T00:00:00Z", NOW()) +| EVAL bucket = BUCKET(hire_date, 5, "1999-01-01T00:00:00Z", NOW()) | STATS COUNT(*) by bucket | sort bucket; @@ -361,12 +361,12 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsString#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsString#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = "1999-01-01T00:00:00Z" | EVAL bucket_end = NOW() -| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) | STATS COUNT(*) by bucket | sort bucket; @@ -374,12 +374,12 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsConcat#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsConcat#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = CONCAT("1999-01-01", "T", "00:00:00Z") | EVAL bucket_end = NOW() -| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) | STATS COUNT(*) by bucket | sort bucket; @@ -387,12 +387,12 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsDate#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsDate#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = TO_DATETIME("1999-01-01T00:00:00.000Z") | EVAL bucket_end = NOW() -| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) | keep bucket_start, bucket_end, bucket | STATS COUNT(*) by bucket | sort bucket; @@ -401,31 +401,31 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsRename#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsRename#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = "1999-01-01T00:00:00Z" | EVAL bucket_end = NOW() | RENAME bucket_end as be, bucket_start as bs -| STATS c = COUNT(*) by AUTO_BUCKET(hire_date, 5, bs, be) +| STATS c = COUNT(*) by BUCKET(hire_date, 5, bs, be) | SORT c ; -c:long | 
AUTO_BUCKET(hire_date, 5, bs, be):date +c:long | BUCKET(hire_date, 5, bs, be):date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketMonthInAgg -// tag::auto_bucket_in_agg[] +bucketMonthInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_in_agg[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bucket = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS AVG(salary) BY bucket | SORT bucket -// end::auto_bucket_in_agg[] +// end::bucket_in_agg[] ; -// tag::auto_bucket_in_agg-result[] +// tag::bucket_in_agg-result[] AVG(salary):double | bucket:date 46305.0 | 1985-02-01T00:00:00.000Z 44817.0 | 1985-05-01T00:00:00.000Z @@ -433,7 +433,7 @@ AVG(salary):double | bucket:date 49095.0 | 1985-09-01T00:00:00.000Z 51532.0 | 1985-10-01T00:00:00.000Z 54539.75 | 1985-11-01T00:00:00.000Z -// end::auto_bucket_in_agg-result[] +// end::bucket_in_agg-result[] ; evalDateDiffInNanoAndMicroAndMilliSeconds#[skip:-8.12.99, reason:date_diff added in 8.13] @@ -950,17 +950,17 @@ birth_date:datetime 1953-04-21T00:00:00.000Z ; -docsAutoBucketMonth -//tag::docsAutoBucketMonth[] +docsBucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketMonth[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL month = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | KEEP hire_date, month | SORT hire_date -//end::docsAutoBucketMonth[] +//end::docsBucketMonth[] ; -//tag::docsAutoBucketMonth-result[] +//tag::docsBucketMonth-result[] hire_date:date | month:date 1985-02-18T00:00:00.000Z|1985-02-01T00:00:00.000Z 1985-02-24T00:00:00.000Z|1985-02-01T00:00:00.000Z @@ -973,20 +973,20 @@ FROM employees 1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z 1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z 1985-11-21T00:00:00.000Z|1985-11-01T00:00:00.000Z -//end::docsAutoBucketMonth-result[] +//end::docsBucketMonth-result[] ; -docsAutoBucketMonthlyHistogram -//tag::docsAutoBucketMonthlyHistogram[] +docsBucketMonthlyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketMonthlyHistogram[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL month = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS hires_per_month = COUNT(*) BY month | SORT month -//end::docsAutoBucketMonthlyHistogram[] +//end::docsBucketMonthlyHistogram[] ; -//tag::docsAutoBucketMonthlyHistogram-result[] +//tag::docsBucketMonthlyHistogram-result[] hires_per_month:long | month:date 2 |1985-02-01T00:00:00.000Z 1 |1985-05-01T00:00:00.000Z @@ -994,20 +994,20 @@ FROM employees 1 |1985-09-01T00:00:00.000Z 2 |1985-10-01T00:00:00.000Z 4 |1985-11-01T00:00:00.000Z -//end::docsAutoBucketMonthlyHistogram-result[] +//end::docsBucketMonthlyHistogram-result[] ; -docsAutoBucketWeeklyHistogram -//tag::docsAutoBucketWeeklyHistogram[] +docsBucketWeeklyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketWeeklyHistogram[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL week = AUTO_BUCKET(hire_date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL week = 
BUCKET(hire_date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS hires_per_week = COUNT(*) BY week | SORT week -//end::docsAutoBucketWeeklyHistogram[] +//end::docsBucketWeeklyHistogram[] ; -//tag::docsAutoBucketWeeklyHistogram-result[] +//tag::docsBucketWeeklyHistogram-result[] hires_per_week:long | week:date 2 |1985-02-18T00:00:00.000Z 1 |1985-05-13T00:00:00.000Z @@ -1015,40 +1015,40 @@ FROM employees 1 |1985-09-16T00:00:00.000Z 2 |1985-10-14T00:00:00.000Z 4 |1985-11-18T00:00:00.000Z -//end::docsAutoBucketWeeklyHistogram-result[] +//end::docsBucketWeeklyHistogram-result[] ; -docsAutoBucketLast24hr#[skip:-8.12.99, reason:date type is supported in 8.13] -//tag::docsAutoBucketLast24hr[] +docsBucketLast24hr#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketLast24hr[] FROM sample_data | WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW() -| EVAL bucket = AUTO_BUCKET(@timestamp, 25, NOW() - 1 day, NOW()) +| EVAL bucket = BUCKET(@timestamp, 25, NOW() - 1 day, NOW()) | STATS COUNT(*) BY bucket -//end::docsAutoBucketLast24hr[] +//end::docsBucketLast24hr[] ; COUNT(*):long | bucket:date ; -docsGettingStartedAutoBucket#[skip:-8.12.99, reason:date type is supported in 8.13] -// tag::gs-auto_bucket[] +docsGettingStartedBucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket[] FROM sample_data | KEEP @timestamp -| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) -// end::gs-auto_bucket[] +| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) +// end::gs-bucket[] | LIMIT 0 ; @timestamp:date | bucket:date ; -docsGettingStartedAutoBucketStatsBy -// tag::gs-auto_bucket-stats-by[] +docsGettingStartedBucketStatsBy#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket-stats-by[] FROM sample_data | KEEP @timestamp, event_duration -| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") | STATS COUNT(*) BY bucket -// end::gs-auto_bucket-stats-by[] +// end::gs-bucket-stats-by[] | SORT bucket ; @@ -1057,13 +1057,13 @@ COUNT(*):long | bucket:date 5 |2023-10-23T13:00:00.000Z ; -docsGettingStartedAutoBucketStatsByMedian -// tag::gs-auto_bucket-stats-by-median[] +docsGettingStartedBucketStatsByMedian#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket-stats-by-median[] FROM sample_data | KEEP @timestamp, event_duration -| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") | STATS median_duration = MEDIAN(event_duration) BY bucket -// end::gs-auto_bucket-stats-by-median[] +// end::gs-bucket-stats-by-median[] | SORT bucket ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 9b06e9a0a8b23..85b665d717449 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -424,3 +424,69 @@ emp_no:i -10002 -10003 ; + +sortExpression1#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| SORT emp_no + salary ASC +| EVAL emp_no = -emp_no +| LIMIT 10 +| EVAL sum = -emp_no + salary +| KEEP emp_no, salary, sum +; + + emp_no:i | salary:i | sum:i +-10015 |25324 |35339 +-10035 |25945 |35980 +-10092 |25976 |36068 +-10048 |26436 |36484 +-10057 |27215 |37272 +-10084 |28035 
|38119 +-10026 |28336 |38362 +-10068 |28941 |39009 +-10060 |29175 |39235 +-10042 |30404 |40446 +; + +sortConcat1#[skip:-8.13.99,reason:supported in 8.14] +from employees +| sort concat(left(last_name, 1), left(first_name, 1)), salary desc +| keep first_name, last_name, salary +| eval ll = left(last_name, 1), lf = left(first_name, 1) +| limit 10 +; + + first_name:keyword | last_name:keyword | salary:integer|ll:keyword|lf:keyword +Mona |Azuma |46595 |A |M +Satosi |Awdeh |50249 |A |S +Brendon |Bernini |33370 |B |B +Breannda |Billingsley |29175 |B |B +Cristinel |Bouloucos |58715 |B |C +Charlene |Brattka |28941 |B |C +Margareta |Bierman |41933 |B |M +Mokhtar |Bernatsky |38992 |B |M +Parto |Bamford |61805 |B |P +Premal |Baek |52833 |B |P +; + +sortConcat2#[skip:-8.13.99,reason:supported in 8.14] +from employees +| eval ln = last_name, fn = first_name, concat = concat(left(last_name, 1), left(first_name, 1)) +| sort concat(left(ln, 1), left(fn, 1)), salary desc +| keep f*, l*, salary +| eval c = concat(left(last_name, 1), left(first_name, 1)) +| drop *name, lan* +| limit 10 +; + + fn:keyword | ln:keyword | salary:integer| c:keyword +Mona |Azuma |46595 |AM +Satosi |Awdeh |50249 |AS +Brendon |Bernini |33370 |BB +Breannda |Billingsley |29175 |BB +Cristinel |Bouloucos |58715 |BC +Charlene |Brattka |28941 |BC +Margareta |Bierman |41933 |BM +Mokhtar |Bernatsky |38992 |BM +Parto |Bamford |61805 |BP +Premal |Baek |52833 |BP +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 0882fec5ec0bf..8f8f218fd9821 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -257,10 +257,10 @@ emp_no:integer | salary_change:double | a1:double 10005 | [-2.14,13.07] | [-2.14,13.07] ; -autoBucket +bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bh = auto_bucket(height, 20, 1.41, 2.10) +| EVAL bh = bucket(height, 20, 1.41, 2.10) | SORT hire_date, height | KEEP hire_date, height, bh ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 3e1d1b19a7f67..026e3d922d00d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -642,17 +642,17 @@ emp_no:integer | salary_change.long:long | a1:long 10005 | [-2, 13] | [-2, 13] ; -autoBucket -// tag::auto_bucket[] +bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bs = AUTO_BUCKET(salary, 20, 25324, 74999) +| EVAL bs = BUCKET(salary, 20, 25324, 74999) | SORT hire_date, salary | KEEP hire_date, salary, bs -// end::auto_bucket[] +// end::bucket[] ; -// tag::auto_bucket-result[] +// tag::bucket-result[] hire_date:date | salary:integer | bs:double 1985-02-18T00:00:00.000Z | 66174 | 65000.0 1985-02-24T00:00:00.000Z | 26436 | 25000.0 @@ -665,19 +665,19 @@ hire_date:date | salary:integer | bs:double 1985-11-20T00:00:00.000Z | 33956 | 30000.0 1985-11-20T00:00:00.000Z | 74999 | 70000.0 1985-11-21T00:00:00.000Z | 56371 | 55000.0 -// end::auto_bucket-result[] +// end::bucket-result[] ; -docsAutoBucketNumeric -//tag::docsAutoBucketNumeric[] 
+docsBucketNumeric#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketNumeric[] FROM employees -| EVAL bs = AUTO_BUCKET(salary, 20, 25324, 74999) +| EVAL bs = BUCKET(salary, 20, 25324, 74999) | STATS COUNT(*) by bs | SORT bs -//end::docsAutoBucketNumeric[] +//end::docsBucketNumeric[] ; -//tag::docsAutoBucketNumeric-result[] +//tag::docsBucketNumeric-result[] COUNT(*):long | bs:double 9 |25000.0 9 |30000.0 @@ -689,7 +689,7 @@ FROM employees 9 |60000.0 8 |65000.0 8 |70000.0 -//end::docsAutoBucketNumeric-result[] +//end::docsBucketNumeric-result[] ; cos diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 58c1cf3dc9174..8e0da1dd354ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -166,6 +166,27 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9 eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1|fe80::cae2:65ff:fece:fec1 ; + +inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0] +required_feature: esql.mv_warn + +from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; +ignoreOrder:true +warningRegex:Line \d+:\d+: evaluation of \[ip0==ip1\] failed, treating result as null. Only first 20 failures recorded. +warningRegex:Line \d+:\d+: evaluation of \[eq in \(ip0, ip1\)\] failed, treating result as null. Only first 20 failures recorded. +warningRegex:java.lang.IllegalArgumentException: single-value function encountered multi-value + +card:keyword |host:keyword |ip0:ip |ip1:ip |eq:ip +eth0 |alpha |127.0.0.1 |127.0.0.1 |127.0.0.1 +eth1 |alpha |::1 |::1 |::1 +eth0 |beta |127.0.0.1 |::1 |::1 +eth1 |beta |127.0.0.1 |127.0.0.2 |127.0.0.2 +eth1 |beta |127.0.0.1 |128.0.0.1 |128.0.0.1 +lo0 |gamma |fe80::cae2:65ff:fece:feb9 |fe81::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:feb9 +eth0 |gamma |fe80::cae2:65ff:fece:feb9 |127.0.0.3 |127.0.0.3 +eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1|fe80::cae2:65ff:fece:fec1 +; + cidrMatchSimple required_feature: esql.mv_warn diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 33b61c95ed0ed..d0e18426f03ab 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -7,8 +7,8 @@ synopsis:keyword "double asin(number:double|integer|long|unsigned_long)" "double atan(number:double|integer|long|unsigned_long)" "double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" -"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" "double avg(number:double|integer|long)" +"double|date bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|keyword|text, to:integer|long|double|date|keyword|text)" "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long 
ceil(number:double|integer|long|unsigned_long)" "boolean cidr_match(ip:ip, blockX...:keyword|text)" @@ -22,7 +22,7 @@ synopsis:keyword "long date_extract(datePart:keyword|text, date:date)" "keyword date_format(?dateFormat:keyword|text, date:date)" "date date_parse(?datePattern:keyword|text, dateString:keyword|text)" -"date date_trunc(interval:keyword, date:date)" +"date date_trunc(interval:date_period|time_duration, date:date)" double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" "double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" @@ -117,8 +117,8 @@ acos |number |"double|integer|long|unsigne asin |number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. atan |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[y coordinate. If `null`\, the function returns `null`., x coordinate. If `null`\, the function returns `null`.] -auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] avg |number |"double|integer|long" |[""] +bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |["", "", "", ""] case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[, CIDR block to test the IP against.] @@ -132,7 +132,7 @@ date_diff |[unit, startTimestamp, endTimestamp]|["keyword|text", date, date] date_extract |[datePart, date] |["keyword|text", date] |[Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era., Date expression] date_format |[dateFormat, date] |["keyword|text", date] |[A valid date pattern, Date expression] date_parse |[datePattern, dateString] |["keyword|text", "keyword|text"] |[A valid date pattern, A string representing a date] -date_trunc |[interval, date] |[keyword, date] |[Interval; expressed using the timespan literal syntax., Date expression] +date_trunc |[interval, date] |["date_period|time_duration", date] |[Interval; expressed using the timespan literal syntax., Date expression] e |null |null |null ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[, ] floor |number |"double|integer|long|unsigned_long" |[""] @@ -228,8 +228,8 @@ acos |Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine asin |Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians. atan |Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians. 
atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. -auto_bucket |Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. avg |The average of a numeric field. +bucket |Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. ceil |Round a number up to the nearest integer. cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. @@ -340,8 +340,8 @@ acos |double asin |double |false |false |false atan |double |false |false |false atan2 |double |[false, false] |false |false -auto_bucket |"double|date" |[false, false, false, false]|false |false avg |double |false |false |true +bucket |"double|date" |[false, false, false, false]|false |false case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" |[false, false] |true |false ceil |"double|integer|long|unsigned_long" |false |false |false cidr_match |boolean |[false, false] |true |false diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index fb2d46baf27ff..867ff127c90e8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1585,3 +1585,27 @@ c:l | k1:i | languages:i 21 | 5 | 5 10 | null | null ; + +minWithSortExpression1#[skip:-8.13.99,reason:supported in 8.14] +FROM employees | STATS min = min(salary) by languages | SORT min + languages; + + min:i | languages:i +25324 |5 +25976 |1 +26436 |3 +27215 |4 +29175 |2 +28336 |null +; + +minWithSortExpression2#[skip:-8.13.99,reason:supported in 8.14] +FROM employees | STATS min = min(salary) by languages | SORT min + CASE(languages == 5, 655, languages); + + min:i | languages:i +25976 |1 +25324 |5 +26436 |3 +27215 |4 +29175 |2 +28336 |null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index f1a15f41af7b3..2bf9259478032 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -150,10 +150,10 @@ warning:Line 1:27: java.lang.IllegalArgumentException: single-value function enc 2017-11-10T20:21:58.000Z|154551962150890564|9382204513185396493|63 |OK ; -autoBucket +bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM ul_logs | WHERE @timestamp >= "2017-11-10T20:30:00Z" AND @timestamp < "2017-11-10T20:35:00Z" -| EVAL bh = auto_bucket(bytes_in, 20, 5480608687137202404, 17764691215469285192) +| EVAL bh = bucket(bytes_in, 20, 5480608687137202404, 17764691215469285192) | SORT @timestamp | KEEP @timestamp, bytes_in, bh ; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 17082e9855761..686fb831aa042 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -994,29 +994,19 @@ public void testOverlappingIndexPatterns() throws Exception { .add(new IndexRequest("test_overlapping_index_patterns_2").id("1").source("field", "foo")) .get(); - assertVerificationException("from test_overlapping_index_patterns_* | sort field"); + assertThrows(VerificationException.class, () -> run("from test_overlapping_index_patterns_* | sort field")); } public void testErrorMessageForUnknownColumn() { - var e = assertVerificationException("row a = 1 | eval x = b"); + var e = expectThrows(VerificationException.class, () -> run("row a = 1 | eval x = b")); assertThat(e.getMessage(), containsString("Unknown column [b]")); } - // Straightforward verification. Subclasses can override. - protected Exception assertVerificationException(String esqlCommand) { - return expectThrows(VerificationException.class, () -> run(esqlCommand)); - } - public void testErrorMessageForEmptyParams() { - var e = assertParsingException("row a = 1 | eval x = ?"); + var e = expectThrows(ParsingException.class, () -> run("row a = 1 | eval x = ?")); assertThat(e.getMessage(), containsString("Not enough actual parameters 0")); } - // Straightforward verification. Subclasses can override. - protected Exception assertParsingException(String esqlCommand) { - return expectThrows(ParsingException.class, () -> run(esqlCommand)); - } - public void testEmptyIndex() { assertAcked(client().admin().indices().prepareCreate("test_empty").setMapping("k", "type=keyword", "v", "type=long").get()); try (EsqlQueryResponse results = run("from test_empty")) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java index e884b67fb5d24..e2e635917ed1c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -25,8 +24,6 @@ import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.TestBlockFactory; -import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import java.nio.file.Path; @@ -37,7 +34,6 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.TimeValue.timeValueSeconds; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsEqual.equalTo; @@ -122,26 +118,6 @@ AcknowledgedResponse deleteAsyncId(String id) { } } - // Overridden to allow for not-serializable wrapper. 
- @Override - protected Exception assertVerificationException(String esqlCommand) { - var e = expectThrowsAnyOf(List.of(NotSerializableExceptionWrapper.class, VerificationException.class), () -> run(esqlCommand)); - if (e instanceof NotSerializableExceptionWrapper wrapper) { - assertThat(wrapper.unwrapCause().getMessage(), containsString("verification_exception")); - } - return e; - } - - // Overridden to allow for not-serializable wrapper. - @Override - protected Exception assertParsingException(String esqlCommand) { - var e = expectThrowsAnyOf(List.of(NotSerializableExceptionWrapper.class, ParsingException.class), () -> run(esqlCommand)); - if (e instanceof NotSerializableExceptionWrapper wrapper) { - assertThat(wrapper.unwrapCause().getMessage(), containsString("parsing_exception")); - } - return e; - } - public static class LocalStateEsqlAsync extends LocalStateCompositeXPackPlugin { public LocalStateEsqlAsync(final Settings settings, final Path configPath) { super(settings, configPath); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java new file mode 100644 index 0000000000000..0427afb6d80c8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Booleans. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForBoolean extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForBoolean(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + BooleanBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getBoolean(firstValueIndex + v); + } + } + } + + private boolean[] extendCell(boolean[] oldCell, int newValueCount) { + if (oldCell == null) { + return new boolean[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendBoolean(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java new file mode 100644 index 0000000000000..ff881da5baf44 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for BytesRefs. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForBytesRef extends EnrichResultBuilder { + private final BytesRefArray bytes; // shared between all cells + private ObjectArray cells; + + EnrichResultBuilderForBytesRef(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + BytesRefArray bytes = null; + try { + bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays()); + this.bytes = bytes; + } finally { + if (bytes == null) { + this.cells.close(); + } + } + } + + @Override + void addInputPage(IntVector positions, Page page) { + BytesRefBlock block = page.getBlock(channel); + BytesRef scratch = new BytesRef(); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? 
oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + int bytesOrd = Math.toIntExact(bytes.size()); + for (int v = 0; v < valueCount; v++) { + scratch = block.getBytesRef(firstValueIndex + v, scratch); + bytes.append(scratch); + newCell[dstIndex + v] = bytesOrd + v; + } + } + } + + private int[] extendCell(int[] oldCell, int newValueCount) { + if (oldCell == null) { + return new int[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(totalPositions)) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendBytesRef(bytes.get(v, scratch)); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(bytes, cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java new file mode 100644 index 0000000000000..93c178d816326 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Doubles. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForDouble extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForDouble(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + DoubleBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getDouble(firstValueIndex + v); + } + } + } + + private double[] extendCell(double[] oldCell, int newValueCount) { + if (oldCell == null) { + return new double[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendDouble(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java new file mode 100644 index 0000000000000..4dec877e0d1e4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Ints. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForInt extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForInt(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + IntBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getInt(firstValueIndex + v); + } + } + } + + private int[] extendCell(int[] oldCell, int newValueCount) { + if (oldCell == null) { + return new int[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendInt(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java new file mode 100644 index 0000000000000..0dd4d1d0a8a0d --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Longs. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForLong extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForLong(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + LongBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getLong(firstValueIndex + v); + } + } + } + + private long[] extendCell(long[] oldCell, int newValueCount) { + if (oldCell == null) { + return new long[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendLong(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java new file mode 100644 index 0000000000000..947b1ecb49d0c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java @@ -0,0 +1,139 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. + * This class is generated. Do not edit it. 
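+ * <p>
+ * Editorial note, inferred from the generated code below: {@code eval(Page)} first
+ * tries to view both inputs as vectors, the null-free single-valued fast path, and
+ * falls back to the block-based variant when either input has nulls or
+ * multi-values; that variant appends null for such positions and registers a
+ * warning when a position is multi-valued.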
+ */ +public final class LocateNoStartEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator substr; + + private final DriverContext driverContext; + + public LocateNoStartEvaluator(Source source, EvalOperator.ExpressionEvaluator str, + EvalOperator.ExpressionEvaluator substr, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.str = str; + this.substr = substr; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) { + try (BytesRefBlock substrBlock = (BytesRefBlock) substr.eval(page)) { + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock); + } + BytesRefVector substrVector = substrBlock.asVector(); + if (substrVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock); + } + return eval(page.getPositionCount(), strVector, substrVector).asBlock(); + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock substrBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (substrBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (substrBlock.getValueCount(p) != 1) { + if (substrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(Locate.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), substrBlock.getBytesRef(substrBlock.getFirstValueIndex(p), substrScratch))); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector) { + try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "LocateNoStartEvaluator[" + "str=" + str + ", substr=" + substr + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(str, substr); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory substr; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory substr) { + this.source = source; + this.str = str; + this.substr = substr; + } + + @Override + public LocateNoStartEvaluator get(DriverContext 
context) { + return new LocateNoStartEvaluator(source, str.get(context), substr.get(context), context); + } + + @Override + public String toString() { + return "LocateNoStartEvaluator[" + "str=" + str + ", substr=" + substr + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 77120c757e97a..e5d4e58d9d61b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -37,7 +37,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OutputOperator; -import org.elasticsearch.compute.operator.ProjectOperator; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasables; @@ -318,22 +317,10 @@ private void doLookup( 0 ) ); - - // drop docs block - intermediateOperators.add(droppingBlockOperator(extractFields.size() + 2, 0)); - boolean singleLeaf = searchContext.searcher().getLeafContexts().size() == 1; - // merging field-values by position - final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 1).toArray(); + final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 2).toArray(); intermediateOperators.add( - new MergePositionsOperator( - singleLeaf, - inputPage.getPositionCount(), - 0, - mergingChannels, - mergingTypes, - driverContext.blockFactory() - ) + new MergePositionsOperator(inputPage.getPositionCount(), 1, mergingChannels, mergingTypes, driverContext.blockFactory()) ); AtomicReference result = new AtomicReference<>(); OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set); @@ -392,17 +379,6 @@ private Page createNullResponse(int positionCount, List extract } } - private static Operator droppingBlockOperator(int totalBlocks, int droppingPosition) { - var size = totalBlocks - 1; - var projection = new ArrayList(size); - for (int i = 0; i < totalBlocks; i++) { - if (i != droppingPosition) { - projection.add(i); - } - } - return new ProjectOperator(projection); - } - private class TransportHandler implements TransportRequestHandler { @Override public void messageReceived(LookupRequest request, TransportChannel channel, Task task) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java new file mode 100644 index 0000000000000..5bb42f3090695 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasable; + +/** + * An abstract class responsible for collecting values for an output block of enrich. + * The incoming values of the same position are combined and added to a single corresponding position. + */ +abstract class EnrichResultBuilder implements Releasable { + protected final BlockFactory blockFactory; + protected final int channel; + protected final int totalPositions; + private long usedBytes; + + EnrichResultBuilder(BlockFactory blockFactory, int channel, int totalPositions) { + this.blockFactory = blockFactory; + this.channel = channel; + this.totalPositions = totalPositions; + } + + /** + * Collects the input values from the input page. + * + * @param positions the positions vector + * @param page the input page. The block located at {@code channel} is the value block + */ + abstract void addInputPage(IntVector positions, Page page); + + abstract Block build(); + + final void adjustBreaker(long bytes) { + blockFactory.breaker().addEstimateBytesAndMaybeBreak(bytes, "<>"); + usedBytes += bytes; + } + + @Override + public void close() { + blockFactory.breaker().addWithoutBreaking(-usedBytes); + } + + static EnrichResultBuilder enrichResultBuilder(ElementType elementType, BlockFactory blockFactory, int channel, int totalPositions) { + return switch (elementType) { + case NULL -> new EnrichResultBuilderForNull(blockFactory, channel, totalPositions); + case INT -> new EnrichResultBuilderForInt(blockFactory, channel, totalPositions); + case LONG -> new EnrichResultBuilderForLong(blockFactory, channel, totalPositions); + case DOUBLE -> new EnrichResultBuilderForDouble(blockFactory, channel, totalPositions); + case BOOLEAN -> new EnrichResultBuilderForBoolean(blockFactory, channel, totalPositions); + case BYTES_REF -> new EnrichResultBuilderForBytesRef(blockFactory, channel, totalPositions); + default -> throw new IllegalArgumentException("no enrich result builder for [" + elementType + "]"); + }; + } + + private static class EnrichResultBuilderForNull extends EnrichResultBuilder { + EnrichResultBuilderForNull(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + assert page.getBlock(channel).areAllValuesNull() : "expected all nulls; but got values"; + } + + @Override + Block build() { + return blockFactory.newConstantNullBlock(totalPositions); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java index 89447807db5b9..a3b7a8be61e2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java @@ -11,12 +11,13 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; -import 
org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import java.util.Arrays; +import java.util.Objects; /** * Combines values at the given blocks with the same positions into a single position for the blocks at the given channels @@ -44,21 +45,13 @@ */ final class MergePositionsOperator implements Operator { private boolean finished = false; - private int filledPositions = 0; - private final boolean singleMode; - private final int positionCount; private final int positionChannel; - private final Block.Builder[] outputBuilders; - private final int[] mergingChannels; - private final ElementType[] mergingTypes; - private PositionBuilder positionBuilder = null; + private final EnrichResultBuilder[] builders; private Page outputPage; - private final BlockFactory blockFactory; MergePositionsOperator( - boolean singleMode, int positionCount, int positionChannel, int[] mergingChannels, @@ -73,123 +66,51 @@ final class MergePositionsOperator implements Operator { + Arrays.toString(mergingTypes) ); } - this.blockFactory = blockFactory; - this.singleMode = singleMode; - this.positionCount = positionCount; this.positionChannel = positionChannel; - this.mergingChannels = mergingChannels; - this.mergingTypes = mergingTypes; - this.outputBuilders = new Block.Builder[mergingTypes.length]; + this.builders = new EnrichResultBuilder[mergingTypes.length]; try { for (int i = 0; i < mergingTypes.length; i++) { - outputBuilders[i] = mergingTypes[i].newBlockBuilder(positionCount, blockFactory); + builders[i] = EnrichResultBuilder.enrichResultBuilder(mergingTypes[i], blockFactory, mergingChannels[i], positionCount); } } finally { - if (outputBuilders[outputBuilders.length - 1] == null) { - Releasables.close(outputBuilders); + if (builders[builders.length - 1] == null) { + Releasables.close(builders); } } } @Override public boolean needsInput() { - return true; + return finished == false; } @Override public void addInput(Page page) { try { final IntBlock positions = page.getBlock(positionChannel); - final int currentPosition = positions.getInt(0); - if (singleMode) { - fillNullUpToPosition(currentPosition); - for (int i = 0; i < mergingChannels.length; i++) { - int channel = mergingChannels[i]; - outputBuilders[i].appendAllValuesToCurrentPosition(page.getBlock(channel)); - } - filledPositions++; - } else { - if (positionBuilder != null && positionBuilder.position != currentPosition) { - flushPositionBuilder(); - } - if (positionBuilder == null) { - positionBuilder = new PositionBuilder(currentPosition, mergingTypes, blockFactory); - } - positionBuilder.combine(page, mergingChannels); + final IntVector positionsVector = Objects.requireNonNull(positions.asVector(), "positions must be a vector"); + for (EnrichResultBuilder builder : builders) { + builder.addInputPage(positionsVector, page); } } finally { Releasables.closeExpectNoException(page::releaseBlocks); } } - static final class PositionBuilder implements Releasable { - private final int position; - private final Block.Builder[] builders; - - PositionBuilder(int position, ElementType[] elementTypes, BlockFactory blockFactory) { - this.position = position; - this.builders = new Block.Builder[elementTypes.length]; - try { - for (int i = 0; i < builders.length; i++) { - builders[i] = elementTypes[i].newBlockBuilder(1, blockFactory); - } - } finally { - if (builders[builders.length - 1] == null) { - Releasables.close(builders); - } - } - } - - void combine(Page page, int[] channels) { - for (int i = 0; i < channels.length; i++) { - Block 
block = page.getBlock(channels[i]); - builders[i].appendAllValuesToCurrentPosition(block); - } - } - - void buildTo(Block.Builder[] output) { - for (int i = 0; i < output.length; i++) { - try (var b = builders[i]; Block block = b.build()) { - output[i].appendAllValuesToCurrentPosition(block); - } + @Override + public void finish() { + final Block[] blocks = new Block[builders.length]; + try { + for (int i = 0; i < builders.length; i++) { + blocks[i] = builders[i].build(); } - } - - @Override - public void close() { - Releasables.close(builders); - } - } - - private void flushPositionBuilder() { - fillNullUpToPosition(positionBuilder.position); - filledPositions++; - try (var p = positionBuilder) { - p.buildTo(outputBuilders); + outputPage = new Page(blocks); } finally { - positionBuilder = null; - } - } - - private void fillNullUpToPosition(int position) { - while (filledPositions < position) { - for (Block.Builder builder : outputBuilders) { - builder.appendNull(); + finished = true; + if (outputPage == null) { + Releasables.close(blocks); } - filledPositions++; - } - } - - @Override - public void finish() { - if (positionBuilder != null) { - flushPositionBuilder(); } - fillNullUpToPosition(positionCount); - final Block[] blocks = Block.Builder.buildAll(outputBuilders); - outputPage = new Page(blocks); - assert outputPage.getPositionCount() == positionCount; - finished = true; } @Override @@ -206,7 +127,7 @@ public Page getOutput() { @Override public void close() { - Releasables.close(Releasables.wrap(outputBuilders), positionBuilder, () -> { + Releasables.close(Releasables.wrap(builders), () -> { if (outputPage != null) { outputPage.releaseBlocks(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st new file mode 100644 index 0000000000000..4c5c9fabfa797 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BytesRefArray; +$else$ +import org.apache.lucene.util.RamUsageEstimator; +$endif$ +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +$if(long)$ +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.$Type$Block; +$else$ +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.IntVector; +$endif$ +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for $Type$s. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. 
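+ * <p>
+ * Editorial note, inferred from the template below: the builder keeps one growable
+ * cell array per output position. {@code addInputPage} appends a page's values to
+ * the addressed cells and charges the circuit breaker for the size delta, and
+ * {@code build()} emits one block position per cell: null where no values ever
+ * arrived, a multi-value entry where more than one did.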
+ */ +final class EnrichResultBuilderFor$Type$ extends EnrichResultBuilder { +$if(BytesRef)$ + private final BytesRefArray bytes; // shared between all cells +$endif$ + private ObjectArray<$if(BytesRef)$int$else$$type$$endif$[]> cells; + + EnrichResultBuilderFor$Type$(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); +$if(BytesRef)$ + BytesRefArray bytes = null; + try { + bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays()); + this.bytes = bytes; + } finally { + if (bytes == null) { + this.cells.close(); + } + } +$endif$ + } + + @Override + void addInputPage(IntVector positions, Page page) { + $Type$Block block = page.getBlock(channel); +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); +$if(BytesRef)$ + int bytesOrd = Math.toIntExact(bytes.size()); + for (int v = 0; v < valueCount; v++) { + scratch = block.getBytesRef(firstValueIndex + v, scratch); + bytes.append(scratch); + newCell[dstIndex + v] = bytesOrd + v; + } +$else$ + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.get$Type$(firstValueIndex + v); + } +$endif$ + } + } + + private $if(BytesRef)$int$else$$type$$endif$[] extendCell($if(BytesRef)$int$else$$type$$endif$[] oldCell, int newValueCount) { + if (oldCell == null) { + return new $if(BytesRef)$int$else$$type$$endif$[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(totalPositions)) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { +$if(BytesRef)$ + builder.appendBytesRef(bytes.get(v, scratch)); +$else$ + builder.append$Type$(v); +$endif$ + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close($if(BytesRef)$bytes, $endif$cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 178c714950b05..62688d753aeef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -48,7 +48,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; import 
org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.AutoBucket; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; @@ -108,6 +108,7 @@ import java.lang.reflect.Constructor; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -142,7 +143,7 @@ private FunctionDefinition[][] functions() { def(Asin.class, Asin::new, "asin"), def(Atan.class, Atan::new, "atan"), def(Atan2.class, Atan2::new, "atan2"), - def(AutoBucket.class, AutoBucket::new, "auto_bucket"), + def(Bucket.class, Bucket::new, "bucket"), def(Ceil.class, Ceil::new, "ceil"), def(Cos.class, Cos::new, "cos"), def(Cosh.class, Cosh::new, "cosh"), @@ -245,7 +246,21 @@ public static String normalizeName(String name) { return name.toLowerCase(Locale.ROOT); } - public record ArgSignature(String name, String[] type, String description, boolean optional) {} + public record ArgSignature(String name, String[] type, String description, boolean optional) { + @Override + public String toString() { + return "ArgSignature{" + + "name='" + + name + + "', type=" + + Arrays.toString(type) + + ", description='" + + description + + "', optional=" + + optional + + '}'; + } + } public record FunctionDescription( String name, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java index 0cee9d2c53cde..7c9a788eed36c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java @@ -18,6 +18,12 @@ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.CONSTRUCTOR) public @interface Example { + + /** + * The description that will appear before the example + */ + String description() default ""; + /** * The test file that contains the example. 
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 0f35b95a287ad..39ad0351b199f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -34,14 +35,26 @@ public class DateTrunc extends BinaryDateTimeFunction implements EvaluatorMapper { - @FunctionInfo(returnType = "date", description = "Rounds down a date to the closest interval.") + @FunctionInfo( + returnType = "date", + description = "Rounds down a date to the closest interval.", + examples = { + @Example(file = "date", tag = "docsDateTrunc"), + @Example( + description = "Combine `DATE_TRUNC` with <> to create date histograms. For\n" + + "example, the number of hires per year:", + file = "date", + tag = "docsDateTruncHistogram" + ), + @Example(description = "Or an hourly error rate:", file = "conditional", tag = "docsCaseHourlyErrorRate") } + ) public DateTrunc( Source source, // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser // used in the CSVTests and fixing it is not trivial @Param( name = "interval", - type = { "keyword" }, + type = { "date_period", "time_duration" }, description = "Interval; expressed using the timespan literal syntax." 
) Expression interval, @Param(name = "date", type = { "date" }, description = "Date expression") Expression field @@ -55,8 +68,8 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - return isDate(timestampField(), sourceText(), FIRST).and( - isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), SECOND, "dateperiod", "timeduration") + return isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), FIRST, "dateperiod", "timeduration").and( + isDate(timestampField(), sourceText(), SECOND) ); } @@ -105,7 +118,7 @@ private static Rounding.Prepared createRounding(final Period period, final ZoneI long periods = period.getUnits().stream().filter(unit -> period.get(unit) != 0).count(); if (periods != 1) { - throw new IllegalArgumentException("Time interval is not supported"); + throw new IllegalArgumentException("Time interval with multiple periods is not supported"); } final Rounding.Builder rounding; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java similarity index 94% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java index b9aeff7f1d935..b58a9bae08146 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java @@ -48,13 +48,13 @@ *

* Takes a date field and three constants and picks a bucket size based on the * constants. The constants are "target bucket count", "from", and "to". It looks like: - * {@code auto_bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")}. + * {@code bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")}. * We have a list of "human" bucket sizes like "one month" and "four hours". We pick * the largest range that covers the range in fewer than the target bucket count. So * in the above case we'll pick month long buckets, yielding 12 buckets. *

*/ -public class AutoBucket extends EsqlScalarFunction implements Validatable { +public class Bucket extends EsqlScalarFunction implements Validatable { // TODO maybe we should just cover the whole of representable dates here - like ten years, 100 years, 1000 years, all the way up. // That way you never end up with more than the target number of buckets. private static final Rounding LARGEST_HUMAN_DATE_ROUNDING = Rounding.builder(Rounding.DateTimeUnit.YEAR_OF_CENTURY).build(); @@ -86,12 +86,12 @@ public class AutoBucket extends EsqlScalarFunction implements Validatable { @FunctionInfo(returnType = { "double", "date" }, description = """ Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into.""") - public AutoBucket( + public Bucket( Source source, @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, @Param(name = "buckets", type = { "integer" }) Expression buckets, - @Param(name = "from", type = { "integer", "long", "double", "date", "string" }) Expression from, - @Param(name = "to", type = { "integer", "long", "double", "date", "string" }) Expression to + @Param(name = "from", type = { "integer", "long", "double", "date", "keyword", "text" }) Expression from, + @Param(name = "to", type = { "integer", "long", "double", "date", "keyword", "text" }) Expression to ) { super(source, List.of(field, buckets, from, to)); this.field = field; @@ -226,12 +226,12 @@ public DataType dataType() { @Override public Expression replaceChildren(List newChildren) { - return new AutoBucket(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); + return new Bucket(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); } @Override protected NodeInfo info() { - return NodeInfo.create(this, AutoBucket::new, field, buckets, from, to); + return NodeInfo.create(this, Bucket::new, field, buckets, from, to); } public Expression field() { @@ -252,6 +252,6 @@ public Expression to() { @Override public String toString() { - return "AutoBucket{" + "field=" + field + ", buckets=" + buckets + ", from=" + from + ", to=" + to + '}'; + return "Bucket{" + "field=" + field + ", buckets=" + buckets + ", from=" + from + ", to=" + to + '}'; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java index c8b546718aabf..52d60da3f7341 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -28,8 +28,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; /** * Locate function, given a string 'a' and a substring 'b', it returns the index of the first occurrence of the substring 'b' in 'a'. 
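Reviewer note: the hunk below only tightens the type check on the optional start argument. For orientation, a plain-Java sketch of LOCATE's assumed semantics (the shipped Locate.process works on BytesRef and is not shown in this diff; the 1-based result and 0 for "not found" follow SQL's LOCATE convention and are an assumption here):

    // Illustrative only: 1-based index of the first occurrence, 0 when absent.
    static int locate(String str, String substr) {
        int idx = str.indexOf(substr);
        return idx < 0 ? 0 : idx + 1;
    }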
@@ -80,7 +80,7 @@ protected TypeResolution resolveType() { return resolution; } - return start == null ? TypeResolution.TYPE_RESOLVED : isInteger(start, sourceText(), THIRD); + return start == null ? TypeResolution.TYPE_RESOLVED : isType(start, dt -> dt == DataTypes.INTEGER, sourceText(), THIRD, "integer"); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 27e3c95bd123a..a0fecd731c71c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -69,7 +69,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.AutoBucket; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; @@ -378,7 +378,7 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, Trim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), // ScalarFunction of(ScalarFunction.class, Atan2.class, PlanNamedTypes::writeAtan2, PlanNamedTypes::readAtan2), - of(ScalarFunction.class, AutoBucket.class, PlanNamedTypes::writeAutoBucket, PlanNamedTypes::readAutoBucket), + of(ScalarFunction.class, Bucket.class, PlanNamedTypes::writeBucket, PlanNamedTypes::readBucket), of(ScalarFunction.class, Case.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), of(ScalarFunction.class, CIDRMatch.class, PlanNamedTypes::writeCIDRMatch, PlanNamedTypes::readCIDRMatch), of(ScalarFunction.class, Coalesce.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), @@ -1395,11 +1395,11 @@ static void writeAtan2(PlanStreamOutput out, Atan2 atan2) throws IOException { out.writeExpression(atan2.x()); } - static AutoBucket readAutoBucket(PlanStreamInput in) throws IOException { - return new AutoBucket(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression(), in.readExpression()); + static Bucket readBucket(PlanStreamInput in) throws IOException { + return new Bucket(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression(), in.readExpression()); } - static void writeAutoBucket(PlanStreamOutput out, AutoBucket bucket) throws IOException { + static void writeBucket(PlanStreamOutput out, Bucket bucket) throws IOException { out.writeSource(bucket.source()); out.writeExpression(bucket.field()); out.writeExpression(bucket.buckets()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 7fb2784bb044f..2aaf34a1dd1d8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -84,6 +84,7 @@ import static java.util.Arrays.asList; import static java.util.Collections.singleton; 
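Reviewer note on ReplaceOrderByExpressionWithEval, the rule added to this file just below: a sort keyed on a computed expression is rewritten into an Eval that materializes the key as a temporary column, a sort on that column, and a Project that restores the original output. Roughly, with an illustrative temporary name (the real one comes from rawTemporaryName and may differ):

    // Before:  ... | SORT length(first_name) DESC
    // After:   ... | EVAL $$order_by$0$0 = length(first_name)
    //              | SORT $$order_by$0$0 DESC
    //              | (Project back to the original columns)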
import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions;
+import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.SubstituteSurrogates.rawTemporaryName;
 import static org.elasticsearch.xpack.ql.expression.Expressions.asAttributes;
 import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection;
 import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.DOWN;
@@ -125,7 +126,8 @@ protected static Batch<LogicalPlan> substitutions() {
 new ReplaceRegexMatch(),
 new ReplaceAliasingEvalWithProject(),
 new SkipQueryOnEmptyMappings(),
- new SubstituteSpatialSurrogates()
+ new SubstituteSpatialSurrogates(),
+ new ReplaceOrderByExpressionWithEval()
 // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634
 );
 }
@@ -321,6 +323,35 @@ protected SpatialRelatesFunction rule(SpatialRelatesFunction function) {
 }
 }
+ static class ReplaceOrderByExpressionWithEval extends OptimizerRules.OptimizerRule<OrderBy> {
+ private static int counter = 0;
+
+ @Override
+ protected LogicalPlan rule(OrderBy orderBy) {
+ int size = orderBy.order().size();
+ List<Alias> evals = new ArrayList<>(size);
+ List<Order> newOrders = new ArrayList<>(size);
+
+ for (int i = 0; i < size; i++) {
+ var order = orderBy.order().get(i);
+ if (order.child() instanceof Attribute == false) {
+ var name = rawTemporaryName("order_by", String.valueOf(i), String.valueOf(counter++));
+ var eval = new Alias(order.child().source(), name, order.child());
+ newOrders.add(order.replaceChildren(List.of(eval.toAttribute())));
+ evals.add(eval);
+ } else {
+ newOrders.add(order);
+ }
+ }
+ if (evals.isEmpty()) {
+ return orderBy;
+ } else {
+ var newOrderBy = new OrderBy(orderBy.source(), new Eval(orderBy.source(), orderBy.child(), evals), newOrders);
+ return new Project(orderBy.source(), newOrderBy, orderBy.output());
+ }
+ }
+ }
+
 static class ConvertStringToByteRef extends OptimizerRules.OptimizerExpressionRule<Literal> {
 ConvertStringToByteRef() {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
index 8785b8f5de887..afb7ee6f53029 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
@@ -7,20 +7,29 @@
 package org.elasticsearch.xpack.esql.plugin;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchWrapperException;
+import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
+import org.elasticsearch.xpack.esql.VerificationException;
 import
org.elasticsearch.xpack.esql.action.EsqlAsyncGetResultAction;
 import org.elasticsearch.xpack.esql.action.EsqlQueryResponse;
 import org.elasticsearch.xpack.esql.action.EsqlQueryTask;
+import org.elasticsearch.xpack.esql.parser.ParsingException;
 import org.elasticsearch.xpack.ql.plugin.AbstractTransportQlAsyncGetResultsAction;
+import org.elasticsearch.xpack.ql.tree.Source;

 public class TransportEsqlAsyncGetResultsAction extends AbstractTransportQlAsyncGetResultsAction<EsqlQueryResponse, EsqlQueryTask> {
@@ -51,8 +60,57 @@ public TransportEsqlAsyncGetResultsAction(
 this.blockFactory = blockFactory;
 }

+ @Override
+ protected void doExecute(Task task, GetAsyncResultRequest request, ActionListener<EsqlQueryResponse> listener) {
+ super.doExecute(task, request, unwrapListener(listener));
+ }
+
 @Override
 public Writeable.Reader<EsqlQueryResponse> responseReader() {
 return EsqlQueryResponse.reader(blockFactory);
 }
+
+ static final String PARSE_EX_NAME = ElasticsearchException.getExceptionName(new ParsingException(Source.EMPTY, ""));
+ static final String VERIFY_EX_NAME = ElasticsearchException.getExceptionName(new VerificationException(""));
+
+ /**
+ * Unwraps the exception in the case of failure. This keeps the exception types
+ * the same as the sync API, namely ParsingException and VerificationException.
+ */
+ static <R> ActionListener<R> unwrapListener(ActionListener<R> listener) {
+ return new ActionListener<>() {
+ @Override
+ public void onResponse(R o) {
+ listener.onResponse(o);
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ if (e instanceof ElasticsearchWrapperException && e instanceof ElasticsearchException ee) {
+ e = unwrapEsException(ee);
+ }
+ if (e instanceof NotSerializableExceptionWrapper wrapper) {
+ String name = wrapper.getExceptionName();
+ if (PARSE_EX_NAME.equals(name)) {
+ e = new ParsingException(Source.EMPTY, e.getMessage());
+ e.setStackTrace(wrapper.getStackTrace());
+ e.addSuppressed(wrapper);
+ } else if (VERIFY_EX_NAME.equals(name)) {
+ e = new VerificationException(e.getMessage());
+ e.setStackTrace(wrapper.getStackTrace());
+ e.addSuppressed(wrapper);
+ }
+ }
+ listener.onFailure(e);
+ }
+ };
+ }
+
+ static RuntimeException unwrapEsException(ElasticsearchException esEx) {
+ Throwable root = esEx.unwrapCause();
+ if (root instanceof RuntimeException runtimeException) {
+ return runtimeException;
+ }
+ return esEx;
+ }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
index 662ae1a208ed0..573dbd20b39c5 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
@@ -99,8 +99,6 @@
 import java.util.concurrent.Executor;
 import java.util.concurrent.TimeUnit;

-import static org.elasticsearch.test.ListMatcher.matchesList;
-import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults;
 import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled;
 import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues;
@@ -470,6 +468,6 @@ private void assertWarnings(List<String> warnings) {
 normalized.add(normW);
 }
 }
- assertMap(normalized, matchesList(testCase.expectedWarnings(true)));
+ EsqlTestUtils.assertWarnings(normalized, testCase.expectedWarnings(true), testCase.expectedWarningsRegex());
 }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
index 662ae1a208ed0..573dbd20b39c5 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
@@ -99,8 +99,6 @@ import java.util.concurrent.Executor;
 import java.util.concurrent.TimeUnit;
 
-import static org.elasticsearch.test.ListMatcher.matchesList;
-import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults;
 import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled;
 import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues;
@@ -470,6 +468,6 @@ private void assertWarnings(List<String> warnings) {
                 normalized.add(normW);
             }
         }
-        assertMap(normalized, matchesList(testCase.expectedWarnings(true)));
+        EsqlTestUtils.assertWarnings(normalized, testCase.expectedWarnings(true), testCase.expectedWarningsRegex());
     }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
index aedc789620480..f4ecf38915a29 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
@@ -1118,36 +1118,36 @@ public void testDateParseOnIntPattern() {
     public void testDateTruncOnInt() {
         verifyUnsupported("""
             from test
-            | eval date_trunc("1M", int)
-            """, "first argument of [date_trunc(\"1M\", int)] must be [datetime], found value [int] type [integer]");
+            | eval date_trunc(1 month, int)
+            """, "second argument of [date_trunc(1 month, int)] must be [datetime], found value [int] type [integer]");
     }
 
     public void testDateTruncOnFloat() {
         verifyUnsupported("""
             from test
-            | eval date_trunc("1M", float)
-            """, "first argument of [date_trunc(\"1M\", float)] must be [datetime], found value [float] type [double]");
+            | eval date_trunc(1 month, float)
+            """, "second argument of [date_trunc(1 month, float)] must be [datetime], found value [float] type [double]");
     }
 
     public void testDateTruncOnText() {
         verifyUnsupported("""
             from test
-            | eval date_trunc("1M", keyword)
-            """, "first argument of [date_trunc(\"1M\", keyword)] must be [datetime], found value [keyword] type [keyword]");
+            | eval date_trunc(1 month, keyword)
+            """, "second argument of [date_trunc(1 month, keyword)] must be [datetime], found value [keyword] type [keyword]");
     }
 
     public void testDateTruncWithNumericInterval() {
         verifyUnsupported("""
             from test
             | eval date_trunc(1, date)
-            """, "second argument of [date_trunc(1, date)] must be [dateperiod or timeduration], found value [1] type [integer]");
+            """, "first argument of [date_trunc(1, date)] must be [dateperiod or timeduration], found value [1] type [integer]");
     }
 
     public void testDateTruncWithDateInterval() {
         verifyUnsupported("""
             from test
             | eval date_trunc(date, date)
-            """, "second argument of [date_trunc(date, date)] must be [dateperiod or timeduration], found value [date] type [datetime]");
+            """, "first argument of [date_trunc(date, date)] must be [dateperiod or timeduration], found value [date] type [datetime]");
     }
 
     // check field declaration is validated even across duplicated declarations
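The flipped ordinals above encode date_trunc's new signature: the interval is now the first argument and the date to truncate the second. A hedged sketch of what now passes and fails analysis (the `test` index and `date` field are the fixture's own):

```java
// Valid under the new signature: interval first, then the date to truncate.
String ok = """
    from test
    | eval date_trunc(1 month, date)
    """;

// Reversing the roles is rejected during analysis; per the test above,
// date_trunc(1, date) now fails with:
//   first argument of [date_trunc(1, date)] must be [dateperiod or timeduration],
//   found value [1] type [integer]
```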
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java
new file mode 100644
index 0000000000000..f6e8b9107504c
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.enrich;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.common.util.PageCacheRecycler;
+import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.BytesRefBlock;
+import org.elasticsearch.compute.data.ElementType;
+import org.elasticsearch.compute.data.LongBlock;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class EnrichResultBuilderTests extends ESTestCase {
+
+    public void testBytesRef() {
+        BlockFactory blockFactory = blockFactory();
+        Map<Integer, List<BytesRef>> expectedValues = new HashMap<>();
+        int numPages = between(0, 10);
+        int maxPosition = between(0, 100);
+        var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.BYTES_REF, blockFactory, 0, maxPosition + 1);
+        for (int i = 0; i < numPages; i++) {
+            int numRows = between(1, 100);
+            try (
+                var positionsBuilder = blockFactory.newIntVectorBuilder(numRows);
+                var valuesBuilder = blockFactory.newBytesRefBlockBuilder(numRows)
+            ) {
+                for (int r = 0; r < numRows; r++) {
+                    int position = between(0, maxPosition);
+                    positionsBuilder.appendInt(position);
+                    int numValues = between(0, 3);
+                    if (numValues == 0) {
+                        valuesBuilder.appendNull();
+                    }
+                    if (numValues > 1) {
+                        valuesBuilder.beginPositionEntry();
+                    }
+                    for (int v = 0; v < numValues; v++) {
+                        BytesRef val = new BytesRef(randomByteArrayOfLength(10));
+                        expectedValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val);
+                        valuesBuilder.appendBytesRef(val);
+                    }
+                    if (numValues > 1) {
+                        valuesBuilder.endPositionEntry();
+                    }
+                }
+                try (var positions = positionsBuilder.build(); var valuesBlock = valuesBuilder.build()) {
+                    resultBuilder.addInputPage(positions, new Page(valuesBlock));
+                }
+            }
+        }
+        try (BytesRefBlock actualOutput = (BytesRefBlock) resultBuilder.build()) {
+            assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1));
+            for (int i = 0; i < actualOutput.getPositionCount(); i++) {
+                List<BytesRef> values = expectedValues.get(i);
+                if (actualOutput.isNull(i)) {
+                    assertNull(values);
+                } else {
+                    int valueCount = actualOutput.getValueCount(i);
+                    int first = actualOutput.getFirstValueIndex(i);
+                    assertThat(valueCount, equalTo(values.size()));
+                    for (int v = 0; v < valueCount; v++) {
+                        assertThat(actualOutput.getBytesRef(first + v, new BytesRef()), equalTo(values.get(v)));
+                    }
+                }
+            }
+        }
+        resultBuilder.close();
+        assertThat(blockFactory.breaker().getUsed(), equalTo(0L));
+    }
+
+    public void testLong() {
+        BlockFactory blockFactory = blockFactory();
+        Map<Integer, List<Long>> expectedValues = new HashMap<>();
+        int numPages = between(0, 10);
+        int maxPosition = between(0, 100);
+        var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.LONG, blockFactory, 0, maxPosition + 1);
+        for (int i = 0; i < numPages; i++) {
+            int numRows = between(1, 100);
+            try (
+                var positionsBuilder = blockFactory.newIntVectorBuilder(numRows);
+                var valuesBuilder = blockFactory.newLongBlockBuilder(numRows)
+            ) {
+                for (int r = 0; r < numRows; r++) {
+                    int position = between(0, maxPosition);
+                    positionsBuilder.appendInt(position);
+                    int numValues = between(0, 3);
+                    if (numValues == 0) {
+                        valuesBuilder.appendNull();
+                    }
+                    if (numValues > 1) {
+
valuesBuilder.beginPositionEntry(); + } + for (int v = 0; v < numValues; v++) { + long val = randomLong(); + expectedValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val); + valuesBuilder.appendLong(val); + } + if (numValues > 1) { + valuesBuilder.endPositionEntry(); + } + } + try (var positions = positionsBuilder.build(); var valuesBlock = valuesBuilder.build()) { + resultBuilder.addInputPage(positions, new Page(valuesBlock)); + } + } + } + try (LongBlock actualOutput = (LongBlock) resultBuilder.build()) { + assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1)); + for (int i = 0; i < actualOutput.getPositionCount(); i++) { + List values = expectedValues.get(i); + if (actualOutput.isNull(i)) { + assertNull(values); + } else { + int valueCount = actualOutput.getValueCount(i); + int first = actualOutput.getFirstValueIndex(i); + assertThat(valueCount, equalTo(values.size())); + for (int v = 0; v < valueCount; v++) { + assertThat(actualOutput.getLong(first + v), equalTo(values.get(v))); + } + } + } + } + resultBuilder.close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + BlockFactory blockFactory() { + var bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(100)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + return new BlockFactory(breaker, bigArrays); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java index 80d127fc81907..09bc36a5390af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java @@ -32,7 +32,6 @@ public void testSimple() throws Exception { CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = new BlockFactory(breaker, bigArrays); MergePositionsOperator mergeOperator = new MergePositionsOperator( - randomBoolean(), 7, 0, new int[] { 1, 2 }, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index b97622f28520c..bc7a67d9eaefa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.esql.expression.function; +import com.carrotsearch.randomizedtesting.ClassModel; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.util.BytesRef; @@ -33,7 +36,9 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.TestBlockFactory; import 
org.elasticsearch.xpack.esql.evaluator.EvalMapper;
@@ -61,14 +66,11 @@
 import org.hamcrest.Matcher;
 import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.rules.TestRule;
-import org.junit.runner.Description;
-import org.junit.runners.model.Statement;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.time.Duration;
@@ -99,6 +101,7 @@
 import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO;
 import static org.hamcrest.Matchers.either;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.nullValue;
@@ -214,7 +217,10 @@ public static ExpressionEvaluator.Factory evaluator(Expression e) {
         }
         Layout.Builder builder = new Layout.Builder();
         buildLayout(builder, e);
-        assertTrue(e.resolved());
+        Expression.TypeResolution resolution = e.typeResolved();
+        if (resolution.unresolved()) {
+            throw new AssertionError("expected resolved " + resolution.message());
+        }
         return EvalMapper.toEvaluator(e, builder.build());
     }
 
@@ -242,18 +248,11 @@ protected final void assertResolveTypeValid(Expression expression, DataType expe
     }
 
     public final void testEvaluate() {
-        testEvaluate(false);
-    }
-
-    public final void testEvaluateFloating() {
-        testEvaluate(true);
-    }
-
-    private void testEvaluate(boolean readFloating) {
         assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable());
         logger.info(
             "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(","))
         );
+        boolean readFloating = randomBoolean();
         Expression expression = readFloating ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase);
         if (testCase.getExpectedTypeError() != null) {
             assertTrue("expected unresolved", expression.typeResolved().unresolved());
@@ -263,7 +262,10 @@ private void testEvaluate(boolean readFloating) {
             }
             return;
         }
-        assertFalse("expected resolved", expression.typeResolved().unresolved());
+        Expression.TypeResolution resolution = expression.typeResolved();
+        if (resolution.unresolved()) {
+            throw new AssertionError("expected resolved " + resolution.message());
+        }
         expression = new FoldNull().rule(expression);
         assertThat(expression.dataType(), equalTo(testCase.expectedType()));
         logger.info("Result type: " + expression.dataType());
@@ -296,47 +298,27 @@ private Object toJavaObjectUnsignedLongAware(Block block, int position) {
     }
 
     /**
-     * Evaluates a {@link Block} of values, all copied from the input pattern, read directly from the page.
+     * Evaluates a {@link Block} of values, all copied from the input pattern.
      * <p>
* Note that this'll sometimes be a {@link Vector} of values if the * input pattern contained only a single value. *
</p>
*/ public final void testEvaluateBlockWithoutNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false, false); - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern, read from an intermediate operator. - *
<p>
- * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
</p>
- */ - public final void testEvaluateBlockWithoutNullsFloating() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false, true); + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); } /** * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read directly from the page. + * some null values inserted between. */ public final void testEvaluateBlockWithNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true, false); - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read from an intermediate operator. - */ - public final void testEvaluateBlockWithNullsFloating() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true, true); + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); } /** * Evaluates a {@link Block} of values, all copied from the input pattern, - * read directly from the {@link Page}, using the - * {@link CrankyCircuitBreakerService} which fails randomly. + * using the {@link CrankyCircuitBreakerService} which fails randomly. *
<p>
* Note that this'll sometimes be a {@link Vector} of values if the * input pattern contained only a single value. @@ -345,25 +327,7 @@ public final void testEvaluateBlockWithNullsFloating() { public final void testCrankyEvaluateBlockWithoutNulls() { assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false, false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern, - * read from an intermediate operator, using the - * {@link CrankyCircuitBreakerService} which fails randomly. - *
<p>
- * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
</p>
- */ - public final void testCrankyEvaluateBlockWithoutNullsFloating() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false, true); + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false); } catch (CircuitBreakingException ex) { assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } @@ -371,27 +335,12 @@ public final void testCrankyEvaluateBlockWithoutNullsFloating() { /** * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read directly from the page, - * using the {@link CrankyCircuitBreakerService} which fails randomly. + * some null values inserted between, using the {@link CrankyCircuitBreakerService} which fails randomly. */ public final void testCrankyEvaluateBlockWithNulls() { assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true, false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read from an intermediate operator, - * using the {@link CrankyCircuitBreakerService} which fails randomly. - */ - public final void testCrankyEvaluateBlockWithNullsFloating() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true, true); + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true); } catch (CircuitBreakingException ex) { assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } @@ -404,9 +353,10 @@ protected Matcher allNullsMatcher() { return nullValue(); } - private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls, boolean readFloating) { + private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { assumeTrue("can only run on representable types", testCase.allTypesAreRepresentable()); assumeTrue("must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); + boolean readFloating = randomBoolean(); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); @@ -577,66 +527,56 @@ public void testSerializationOfSimple() { assertSerialization(buildFieldExpression(testCase)); } - private static boolean ranAllTests = false; - - @ClassRule - public static TestRule rule = new TestRule() { - @Override - public Statement apply(Statement base, Description description) { - for (Description d : description.getChildren()) { - if (d.getChildren().size() > 1) { - ranAllTests = true; - return base; - } - } - return base; - } - }; - @AfterClass public static void testFunctionInfo() { - if (ranAllTests == false) { - LogManager.getLogger(getTestClass()).info("Skipping function info checks because we're running a portion of the tests"); - return; - } + Logger log = LogManager.getLogger(getTestClass()); FunctionDefinition definition = definition(functionName()); if (definition == null) { - 
LogManager.getLogger(getTestClass()).info("Skipping function info checks because the function isn't registered"); + log.info("Skipping function info checks because the function isn't registered"); return; } - LogManager.getLogger(getTestClass()).info("Running function info checks"); + // TODO fix case tests to include all supported types + assumeFalse("CASE test incomplete", definition.name().equals("case")); + log.info("Running function info checks"); EsqlFunctionRegistry.FunctionDescription description = EsqlFunctionRegistry.description(definition); List args = description.args(); - assertTrue("expect description to be defined", description.description() != null && description.description().length() > 0); + assertTrue("expect description to be defined", description.description() != null && false == description.description().isEmpty()); List> typesFromSignature = new ArrayList<>(); Set returnFromSignature = new HashSet<>(); for (int i = 0; i < args.size(); i++) { typesFromSignature.add(new HashSet<>()); } - for (Map.Entry, DataType> entry : signatures.entrySet()) { + for (Map.Entry, DataType> entry : signatures().entrySet()) { List types = entry.getKey(); for (int i = 0; i < args.size() && i < types.size(); i++) { - typesFromSignature.get(i).add(types.get(i).esType()); + typesFromSignature.get(i).add(signatureType(types.get(i))); } returnFromSignature.add(entry.getValue().esType()); } for (int i = 0; i < args.size(); i++) { - Set annotationTypes = Arrays.stream(args.get(i).type()).collect(Collectors.toCollection(() -> new TreeSet<>())); + EsqlFunctionRegistry.ArgSignature arg = args.get(i); + Set annotationTypes = Arrays.stream(arg.type()).collect(Collectors.toCollection(TreeSet::new)); Set signatureTypes = typesFromSignature.get(i); if (signatureTypes.isEmpty()) { + log.info("{}: skipping", arg.name()); continue; } + log.info("{}: tested {} vs annotated {}", arg.name(), signatureTypes, annotationTypes); assertEquals(signatureTypes, annotationTypes); } - Set returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(() -> new TreeSet<>())); + Set returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(TreeSet::new)); assertEquals(returnFromSignature, returnTypes); } + private static String signatureType(DataType type) { + return type.esType() != null ? type.esType() : type.typeName(); + } + /** * Adds cases with {@code null} and asserts that the result is {@code null}. *
<p>
@@ -651,6 +591,28 @@ public static void testFunctionInfo() { * on input types like {@link Greatest} or {@link Coalesce}. */ protected static List anyNullIsNull(boolean entirelyNullPreservesType, List testCaseSuppliers) { + return anyNullIsNull( + testCaseSuppliers, + (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false + && nullValueDataType == DataTypes.NULL + && original.getData().size() == 1 ? DataTypes.NULL : original.expectedType(), + (nullPosition, original) -> original + ); + } + + public interface ExpectedType { + DataType expectedType(int nullPosition, DataType nullValueDataType, TestCaseSupplier.TestCase original); + } + + public interface ExpectedEvaluatorToString { + Matcher evaluatorToString(int nullPosition, Matcher original); + } + + protected static List anyNullIsNull( + List testCaseSuppliers, + ExpectedType expectedType, + ExpectedEvaluatorToString evaluatorToString + ) { typesRequired(testCaseSuppliers); List suppliers = new ArrayList<>(testCaseSuppliers.size()); suppliers.addAll(testCaseSuppliers); @@ -673,15 +635,12 @@ protected static List anyNullIsNull(boolean entirelyNullPreser TestCaseSupplier.TestCase oc = original.get(); List data = IntStream.range(0, oc.getData().size()).mapToObj(i -> { TestCaseSupplier.TypedData od = oc.getData().get(i); - if (i == finalNullPosition) { - return new TestCaseSupplier.TypedData(null, od.type(), od.name()); - } - return od; + return i == finalNullPosition ? od.forceValueToNull() : od; }).toList(); return new TestCaseSupplier.TestCase( data, - oc.evaluatorToString(), - oc.expectedType(), + evaluatorToString.evaluatorToString(finalNullPosition, oc.evaluatorToString()), + expectedType.expectedType(finalNullPosition, oc.getData().get(finalNullPosition).type(), oc), nullValue(), null, oc.getExpectedTypeError(), @@ -704,7 +663,7 @@ protected static List anyNullIsNull(boolean entirelyNullPreser return new TestCaseSupplier.TestCase( data, equalTo("LiteralsEvaluator[lit=null]"), - entirelyNullPreservesType == false && oc.getData().size() == 1 ? DataTypes.NULL : oc.expectedType(), + expectedType.expectedType(finalNullPosition, DataTypes.NULL, oc), nullValue(), null, oc.getExpectedTypeError(), @@ -810,9 +769,8 @@ private static Stream> allPermutations(int argumentCount) { if (argumentCount == 0) { return Stream.of(List.of()); } - if (argumentCount > 4) { - // TODO check for a limit 4. is arbitrary. - throw new IllegalArgumentException("would generate too many types"); + if (argumentCount > 3) { + throw new IllegalArgumentException("would generate too many combinations"); } Stream> stream = representable().map(t -> List.of(t)); for (int i = 1; i < argumentCount; i++) { @@ -894,6 +852,7 @@ protected static String typeErrorMessage(boolean includeOrdinal, List classGeneratingSignatures = null; /** - * Unique signatures encountered by this test. - *
<p>
- * We clear this at the beginning of the test class with - * {@link #clearSignatures} out of paranoia. It is - * shared by many tests, after all. - *
</p>
- *
<p>
- * After each test method we add the signature it operated on via - * {@link #trackSignature}. Once the test class is done we render - * all the unique signatures to a temp file with {@link #renderTypes}. - * We use a temp file because that's all we're allowed to write to. - * Gradle will move the files into the docs after this is done. - *
</p>
+ * Unique signatures in this test's parameters. */ - private static final Map, DataType> signatures = new HashMap<>(); - - @BeforeClass - public static void clearSignatures() { - signatures.clear(); - } + private static Map, DataType> signatures; - @After - public void trackSignature() { - if (testCase.getExpectedTypeError() != null) { - return; + private static Map, DataType> signatures() { + Class testClass = getTestClass(); + if (signatures != null && classGeneratingSignatures == testClass) { + return signatures; } - if (testCase.getData().stream().anyMatch(t -> t.type() == DataTypes.NULL)) { - return; + signatures = new HashMap<>(); + Set paramsFactories = new ClassModel(testClass).getAnnotatedLeafMethods(ParametersFactory.class).keySet(); + assertThat(paramsFactories, hasSize(1)); + Method paramsFactory = paramsFactories.iterator().next(); + List params; + try { + params = (List) paramsFactory.invoke(null); + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); } - signatures.putIfAbsent(testCase.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), testCase.expectedType()); + for (Object p : params) { + TestCaseSupplier tcs = (TestCaseSupplier) ((Object[]) p)[0]; + TestCaseSupplier.TestCase tc = tcs.get(); + if (tc.getExpectedTypeError() != null) { + continue; + } + if (tc.getData().stream().anyMatch(t -> t.type() == DataTypes.NULL)) { + continue; + } + signatures.putIfAbsent(tc.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), tc.expectedType()); + } + return signatures; } @AfterClass @@ -1108,6 +1066,17 @@ public static void renderDocs() throws IOException { renderDescription(description.description(), info.note()); boolean hasExamples = renderExamples(info); renderFullLayout(name, hasExamples); + renderKibanaInlineDocs(name, info); + List args = description.args(); + if (name.equals("case")) { + EsqlFunctionRegistry.ArgSignature falseValue = args.get(1); + args = List.of( + args.get(0), + falseValue, + new EsqlFunctionRegistry.ArgSignature("falseValue", falseValue.type(), falseValue.description(), true) + ); + } + renderKibanaFunctionDefinition(name, info, args, description.variadic()); return; } LogManager.getLogger(getTestClass()).info("Skipping rendering types because the function '" + name + "' isn't registered"); @@ -1124,7 +1093,7 @@ private static void renderTypes(List argNames) throws IOException { header.append("result"); List table = new ArrayList<>(); - for (Map.Entry, DataType> sig : signatures.entrySet()) { + for (Map.Entry, DataType> sig : signatures().entrySet()) { // TODO flip to using sortedSignatures if (sig.getKey().size() != argNames.size()) { continue; } @@ -1183,6 +1152,10 @@ private static boolean renderExamples(FunctionInfo info) throws IOException { builder.append("*Examples*\n\n"); } for (Example example : info.examples()) { + if (example.description().length() > 0) { + builder.append(example.description()); + builder.append("\n"); + } builder.append(""" [source.merge.styled,esql] ---- @@ -1223,6 +1196,130 @@ private static void renderFullLayout(String name, boolean hasExamples) throws IO writeToTempDir("layout", rendered, "asciidoc"); } + private static void renderKibanaInlineDocs(String name, FunctionInfo info) throws IOException { + StringBuilder builder = new StringBuilder(); + builder.append(""" + + + """); + builder.append("### ").append(name.toUpperCase(Locale.ROOT)).append("\n"); + builder.append(info.description()).append("\n\n"); + + if (info.examples().length > 0) { + 
Example example = info.examples()[0]; + builder.append("```\n"); + builder.append("read-example::").append(example.file()).append(".csv-spec[tag=").append(example.tag()).append("]\n"); + builder.append("```\n"); + } + if (Strings.isNullOrEmpty(info.note()) == false) { + builder.append("Note: ").append(info.note()).append("\n"); + } + String rendered = builder.toString(); + LogManager.getLogger(getTestClass()).info("Writing kibana inline docs for [{}]:\n{}", functionName(), rendered); + writeToTempDir("kibana/docs", rendered, "md"); + } + + private static void renderKibanaFunctionDefinition( + String name, + FunctionInfo info, + List args, + boolean variadic + ) throws IOException { + + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint().lfAtEnd().startObject(); + builder.field( + "comment", + "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it." + ); + builder.field("type", "eval"); // TODO aggs in here too + builder.field("name", name); + builder.field("description", removeAsciidocLinks(info.description())); + if (Strings.isNullOrEmpty(info.note()) == false) { + builder.field("note", removeAsciidocLinks(info.note())); + } + // TODO aliases + + builder.startArray("signatures"); + if (args.isEmpty()) { + builder.startObject(); + builder.startArray("params"); + builder.endArray(); + // There should only be one return type so just use that as the example + builder.field("returnType", signatures().values().iterator().next().typeName()); + builder.endObject(); + } else { + int minArgCount = (int) args.stream().filter(a -> false == a.optional()).count(); + for (Map.Entry, DataType> sig : sortedSignatures()) { + if (variadic && sig.getKey().size() > args.size()) { + // For variadic functions we test much longer signatures, let's just stop at the last one + continue; + } + // TODO make constants for auto_bucket so the signatures get recognized + if (name.equals("auto_bucket") == false && sig.getKey().size() < minArgCount) { + throw new IllegalArgumentException("signature " + sig.getKey() + " is missing non-optional arg for " + args); + } + builder.startObject(); + builder.startArray("params"); + for (int i = 0; i < sig.getKey().size(); i++) { + EsqlFunctionRegistry.ArgSignature arg = args.get(i); + builder.startObject(); + builder.field("name", arg.name()); + builder.field("type", sig.getKey().get(i).typeName()); + builder.field("optional", arg.optional()); + builder.field("description", arg.description()); + builder.endObject(); + } + builder.endArray(); + builder.field("variadic", variadic); + builder.field("returnType", sig.getValue().typeName()); + builder.endObject(); + } + } + builder.endArray(); + + if (info.examples().length > 0) { + builder.startArray("examples"); + for (Example example : info.examples()) { + builder.value("read-example::" + example.file() + ".csv-spec[tag=" + example.tag() + ", json]"); + } + builder.endArray(); + } + + String rendered = Strings.toString(builder.endObject()); + LogManager.getLogger(getTestClass()).info("Writing kibana function definition for [{}]:\n{}", functionName(), rendered); + writeToTempDir("kibana/definition", rendered, "json"); + } + + private static String removeAsciidocLinks(String asciidoc) { + return asciidoc.replaceAll("[^ ]+\\[([^\\]]+)\\]", "$1"); + } + + private static List, DataType>> sortedSignatures() { + List, DataType>> sortedSignatures = new ArrayList<>(signatures().entrySet()); + Collections.sort(sortedSignatures, new Comparator<>() { + @Override + public 
int compare(Map.Entry<List<DataType>, DataType> lhs, Map.Entry<List<DataType>, DataType> rhs) {
+                int maxlen = Math.max(lhs.getKey().size(), rhs.getKey().size());
+                for (int i = 0; i < maxlen; i++) {
+                    if (lhs.getKey().size() <= i) {
+                        return -1;
+                    }
+                    if (rhs.getKey().size() <= i) {
+                        return 1;
+                    }
+                    int c = lhs.getKey().get(i).typeName().compareTo(rhs.getKey().get(i).typeName());
+                    if (c != 0) {
+                        return c;
+                    }
+                }
+                return lhs.getValue().typeName().compareTo(rhs.getValue().typeName());
+            }
+        });
+        return sortedSignatures;
+    }
+
     protected static String functionName() {
         Class<?> testClass = getTestClass();
         if (testClass.isAnnotationPresent(FunctionName.class)) {
@@ -1287,7 +1384,7 @@ private static void writeToTempDir(String subdir, String str, String extension)
         Files.createDirectories(dir);
         Path file = dir.resolve(functionName() + "." + extension);
         Files.writeString(file, str);
-        LogManager.getLogger(getTestClass()).info("Wrote function types for [{}] to file: {}", functionName(), file);
+        LogManager.getLogger(getTestClass()).info("Wrote to file: {}", file);
     }
 
     private final List<CircuitBreaker> breakers = Collections.synchronizedList(new ArrayList<>());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java
index d600e51c07925..db26624bc66bf 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java
@@ -72,7 +72,7 @@ public TestCaseSupplier(List<DataType> types, Supplier<TestCase> supplier) {
         this(nameFromTypes(types), types, supplier);
     }
 
-    static String nameFromTypes(List<DataType> types) {
+    public static String nameFromTypes(List<DataType> types) {
         return types.stream().map(t -> "<" + t.typeName() + ">").collect(Collectors.joining(", "));
     }
 
@@ -1325,6 +1325,14 @@ public TypedData forceLiteral() {
         return new TypedData(data, type, name, true);
     }
 
+    /**
+     * Return a {@link TypedData} that always returns {@code null} for its
+     * value without modifying anything else in the supplier.
+ */ + public TypedData forceValueToNull() { + return new TypedData(null, type, name, forceLiteral); + } + @Override public String toString() { if (type == DataTypes.UNSIGNED_LONG && data instanceof Long longData) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java index 4f897c47d73b8..98fbff6a816c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java @@ -7,28 +7,56 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.common.Rounding; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.SerializationTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; -import org.elasticsearch.xpack.ql.expression.FieldAttribute; -import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.type.DateEsField; -import org.elasticsearch.xpack.ql.type.EsField; import java.time.Duration; import java.time.Instant; import java.time.Period; -import java.util.Collections; -import java.util.Map; +import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.createRounding; import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.process; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class DateTruncTests extends AbstractFunctionTestCase { -public class DateTruncTests extends ESTestCase { + public DateTruncTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + long ts = toMillis("2023-02-17T10:25:33.38Z"); + List suppliers = List.of( + ofDatePeriod(Period.ofDays(1), ts, "2023-02-17T00:00:00.00Z"), + ofDatePeriod(Period.ofMonths(1), ts, "2023-02-01T00:00:00.00Z"), + ofDatePeriod(Period.ofYears(1), ts, "2023-01-01T00:00:00.00Z"), + ofDatePeriod(Period.ofDays(10), ts, "2023-02-12T00:00:00.00Z"), + // 7 days period should return weekly rounding + ofDatePeriod(Period.ofDays(7), ts, "2023-02-13T00:00:00.00Z"), + // 3 months period should return quarterly + ofDatePeriod(Period.ofMonths(3), ts, "2023-01-01T00:00:00.00Z"), + ofDuration(Duration.ofHours(1), ts, "2023-02-17T10:00:00.00Z"), + ofDuration(Duration.ofMinutes(1), ts, "2023-02-17T10:25:00.00Z"), + ofDuration(Duration.ofSeconds(1), ts, "2023-02-17T10:25:33.00Z"), + ofDuration(Duration.ofHours(3), ts, "2023-02-17T09:00:00.00Z"), + ofDuration(Duration.ofMinutes(15), ts, "2023-02-17T10:15:00.00Z"), + ofDuration(Duration.ofSeconds(30), ts, "2023-02-17T10:25:30.00Z"), + randomSecond() + ); + return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } public void testCreateRoundingDuration() { Rounding.Prepared rounding; @@ -71,7 +99,7 @@ public void testCreateRoundingPeriod() { assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); e = expectThrows(IllegalArgumentException.class, () -> createRounding(Period.of(0, 1, 1))); - assertThat(e.getMessage(), containsString("Time interval is not supported")); + assertThat(e.getMessage(), containsString("Time interval with multiple periods is not supported")); rounding = createRounding(Period.ofDays(1)); assertEquals(1, rounding.roundingSize(Rounding.DateTimeUnit.DAY_OF_MONTH), 0d); @@ -103,25 +131,6 @@ public void testCreateRoundingNullInterval() { public void testDateTruncFunction() { long ts = toMillis("2023-02-17T10:25:33.38Z"); - assertEquals(toMillis("2023-02-17T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(1)))); - assertEquals(toMillis("2023-02-01T00:00:00.00Z"), process(ts, createRounding(Period.ofMonths(1)))); - assertEquals(toMillis("2023-01-01T00:00:00.00Z"), process(ts, createRounding(Period.ofYears(1)))); - - assertEquals(toMillis("2023-02-12T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(10)))); - // 7 days period should return weekly rounding - assertEquals(toMillis("2023-02-13T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(7)))); - // 3 months period should return quarterly - assertEquals(toMillis("2023-01-01T00:00:00.00Z"), process(ts, createRounding(Period.ofMonths(3)))); - - assertEquals(toMillis("2023-02-17T10:00:00.00Z"), process(ts, createRounding(Duration.ofHours(1)))); - assertEquals(toMillis("2023-02-17T10:25:00.00Z"), process(ts, createRounding(Duration.ofMinutes(1)))); - assertEquals(toMillis("2023-02-17T10:25:33.00Z"), process(ts, createRounding(Duration.ofSeconds(1)))); - - assertEquals(toMillis("2023-02-17T09:00:00.00Z"), process(ts, createRounding(Duration.ofHours(3)))); - assertEquals(toMillis("2023-02-17T10:15:00.00Z"), process(ts, createRounding(Duration.ofMinutes(15)))); - assertEquals(toMillis("2023-02-17T10:25:30.00Z"), process(ts, createRounding(Duration.ofSeconds(30)))); - assertEquals(toMillis("2023-02-17T10:25:30.00Z"), process(ts, createRounding(Duration.ofSeconds(30)))); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> process(ts, createRounding(Period.ofDays(-1)))); assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); @@ -129,36 +138,71 @@ public void testDateTruncFunction() { assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); } - private static long toMillis(String timestamp) { - return Instant.parse(timestamp).toEpochMilli(); + private static TestCaseSupplier ofDatePeriod(Period period, long value, String expectedDate) { + return new TestCaseSupplier( + List.of(EsqlDataTypes.DATE_PERIOD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(period, EsqlDataTypes.DATE_PERIOD, "interval"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(expectedDate)) + ) + ); + } + + private static TestCaseSupplier ofDuration(Duration duration, long value, String expectedDate) { + return new TestCaseSupplier( + List.of(EsqlDataTypes.TIME_DURATION, DataTypes.DATETIME), + () -> new 
TestCaseSupplier.TestCase(
+                List.of(
+                    new TestCaseSupplier.TypedData(duration, EsqlDataTypes.TIME_DURATION, "interval"),
+                    new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date")
+                ),
+                "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]",
+                DataTypes.DATETIME,
+                equalTo(toMillis(expectedDate))
+            )
+        );
     }
 
-    public void testSerialization() {
-        var dateTrunc = new DateTrunc(Source.EMPTY, randomDateIntervalLiteral(), randomDateField());
-        SerializationTestUtils.assertSerialization(dateTrunc);
+    private static TestCaseSupplier randomSecond() {
+        return new TestCaseSupplier("random second", List.of(EsqlDataTypes.TIME_DURATION, DataTypes.DATETIME), () -> {
+            String dateFragment = randomIntBetween(2000, 2050)
+                + "-"
+                + pad(randomIntBetween(1, 12))
+                + "-"
+                + pad(randomIntBetween(1, 28))
+                + "T"
+                + pad(randomIntBetween(0, 23))
+                + ":"
+                + pad(randomIntBetween(0, 59))
+                + ":"
+                + pad(randomIntBetween(0, 59));
+            return new TestCaseSupplier.TestCase(
+                List.of(
+                    new TestCaseSupplier.TypedData(Duration.ofSeconds(1), EsqlDataTypes.TIME_DURATION, "interval"),
+                    new TestCaseSupplier.TypedData(toMillis(dateFragment + ".38Z"), DataTypes.DATETIME, "date")
+                ),
+                "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]",
+                DataTypes.DATETIME,
+                equalTo(toMillis(dateFragment + ".00Z"))
+            );
+        });
     }
 
-    private static FieldAttribute randomDateField() {
-        String fieldName = randomAlphaOfLength(randomIntBetween(1, 25));
-        String dateName = randomAlphaOfLength(randomIntBetween(1, 25));
-        boolean hasDocValues = randomBoolean();
-        if (randomBoolean()) {
-            return new FieldAttribute(Source.EMPTY, fieldName, new EsField(dateName, DataTypes.DATETIME, Map.of(), hasDocValues));
-        } else {
-            return new FieldAttribute(Source.EMPTY, fieldName, DateEsField.dateEsField(dateName, Collections.emptyMap(), hasDocValues));
-        }
+    private static String pad(int i) {
+        return i > 9 ? "" + i : "0" + i;
+    }
+
+    private static long toMillis(String timestamp) {
+        return Instant.parse(timestamp).toEpochMilli();
     }
 
-    private static Literal randomDateIntervalLiteral() {
-        Duration duration = switch (randomInt(5)) {
-            case 0 -> Duration.ofNanos(randomIntBetween(1, 100000));
-            case 1 -> Duration.ofMillis(randomIntBetween(1, 1000));
-            case 2 -> Duration.ofSeconds(randomIntBetween(1, 1000));
-            case 3 -> Duration.ofMinutes(randomIntBetween(1, 1000));
-            case 4 -> Duration.ofHours(randomIntBetween(1, 100));
-            case 5 -> Duration.ofDays(randomIntBetween(1, 60));
-            default -> throw new AssertionError();
-        };
-        return new Literal(Source.EMPTY, duration, EsqlDataTypes.TIME_DURATION);
+    @Override
+    protected Expression build(Source source, List<Expression> args) {
+        return new DateTrunc(source, args.get(0), args.get(1));
     }
 }
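Taken together, the suppliers above and the kept assertions pin down how createRounding maps intervals onto Rounding units. A hedged recap as a method-body fragment (createRounding, process and toMillis are the helpers already in scope in this test; the expected values are the ones the cases assert):

```java
long ts = toMillis("2023-02-17T10:25:33.38Z");

process(ts, createRounding(Period.ofDays(1)));       // 2023-02-17T00:00:00Z — calendar day
process(ts, createRounding(Period.ofDays(7)));       // 2023-02-13T00:00:00Z — 7 days snaps to week boundaries
process(ts, createRounding(Period.ofMonths(3)));     // 2023-01-01T00:00:00Z — 3 months snaps to quarters
process(ts, createRounding(Duration.ofMinutes(15))); // 2023-02-17T10:15:00Z — fixed 15-minute buckets

// Mixed periods are rejected:
// createRounding(Period.of(0, 1, 1)) throws IllegalArgumentException
// "Time interval with multiple periods is not supported"
```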
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java
deleted file mode 100644
index 013753c801c39..0000000000000
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.xpack.esql.expression.function.scalar.math;
-
-import com.carrotsearch.randomizedtesting.annotations.Name;
-import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
-import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.Rounding;
-import org.elasticsearch.index.mapper.DateFieldMapper;
-import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
-import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase;
-import org.elasticsearch.xpack.ql.expression.Expression;
-import org.elasticsearch.xpack.ql.expression.Literal;
-import org.elasticsearch.xpack.ql.tree.Source;
-import org.elasticsearch.xpack.ql.type.DataType;
-import org.elasticsearch.xpack.ql.type.DataTypes;
-import org.hamcrest.Matcher;
-
-import java.util.List;
-import java.util.function.Supplier;
-
-import static org.hamcrest.Matchers.equalTo;
-
-public class AutoBucketTests extends AbstractScalarFunctionTestCase {
-    public AutoBucketTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
-        this.testCase = testCaseSupplier.get();
-    }
-
-    @ParametersFactory
-    public static Iterable<Object[]> parameters() {
-        return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Autobucket Single date", () -> {
-            List<TestCaseSupplier.TypedData> args = List.of(
-                new TestCaseSupplier.TypedData(
-                    DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z"),
-                    DataTypes.DATETIME,
-                    "arg"
-                )
-            );
-            return new TestCaseSupplier.TestCase(
-                args,
-                "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]",
-                DataTypes.DATETIME,
-                dateResultsMatcher(args)
-            );
-        }), new TestCaseSupplier("Autobucket Single long", () -> {
-            List<TestCaseSupplier.TypedData> args = List.of(new TestCaseSupplier.TypedData(100L, DataTypes.LONG, "arg"));
-            return new TestCaseSupplier.TestCase(
-                args,
-                "MulDoublesEvaluator[lhs=FloorDoubleEvaluator["
-                    + "val=DivDoublesEvaluator[lhs=CastLongToDoubleEvaluator[v=Attribute[channel=0]], "
-                    + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]",
-                DataTypes.DOUBLE,
-                numericResultsMatcher(args, 100.0)
-            );
-        }), new TestCaseSupplier("Autobucket Single int", () -> {
-            List<TestCaseSupplier.TypedData> args = List.of(new TestCaseSupplier.TypedData(100, DataTypes.INTEGER, "arg"));
-            return new TestCaseSupplier.TestCase(
-                args,
-                "MulDoublesEvaluator[lhs=FloorDoubleEvaluator["
-                    + "val=DivDoublesEvaluator[lhs=CastIntToDoubleEvaluator[v=Attribute[channel=0]], "
-                    + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]",
-                DataTypes.DOUBLE,
-                numericResultsMatcher(args, 100.0)
-            );
-        }), new TestCaseSupplier("Autobucket Single double", () -> {
-            List<TestCaseSupplier.TypedData> args = List.of(new TestCaseSupplier.TypedData(100.0, DataTypes.DOUBLE, "arg"));
-            return new TestCaseSupplier.TestCase(
-                args,
-                "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=Attribute[channel=0], "
-                    + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]",
-                DataTypes.DOUBLE,
-                numericResultsMatcher(args, 100.0)
-            );
-        })));
-    }
-
-    private Expression build(Source source, Expression arg) {
-        Literal from;
-        Literal to;
-        if (arg.dataType() == DataTypes.DATETIME) {
-            from = stringOrDateTime("2023-02-01T00:00:00.00Z");
-            to = stringOrDateTime("2023-03-01T09:00:00.00Z");
-        } else {
-            from = new Literal(Source.EMPTY, 0, DataTypes.DOUBLE);
-            to = new Literal(Source.EMPTY, 1000, DataTypes.DOUBLE);
-        }
-        return new AutoBucket(source, arg, new Literal(Source.EMPTY, 50, DataTypes.INTEGER), from, to);
-    }
-
-    private 
Literal stringOrDateTime(String date) { - if (randomBoolean()) { - return new Literal(Source.EMPTY, new BytesRef(date), randomBoolean() ? DataTypes.KEYWORD : DataTypes.TEXT); - } - return new Literal(Source.EMPTY, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date), DataTypes.DATETIME); - } - - @Override - protected DataType expectedType(List argTypes) { - if (argTypes.get(0).isNumeric()) { - return DataTypes.DOUBLE; - } - return argTypes.get(0); - } - - private static Matcher dateResultsMatcher(List typedData) { - long millis = ((Number) typedData.get(0).data()).longValue(); - return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); - } - - private static Matcher numericResultsMatcher(List typedData, Object value) { - return equalTo(value); - } - - @Override - protected List argSpec() { - DataType[] numerics = numerics(); - DataType[] all = new DataType[numerics.length + 1]; - all[0] = DataTypes.DATETIME; - System.arraycopy(numerics, 0, all, 1, numerics.length); - return List.of(required(all)); - } - - @Override - protected Expression build(Source source, List args) { - return build(source, args.get(0)); - } - - @Override - protected Matcher badTypeError(List spec, int badArgPosition, DataType badArgType) { - return equalTo("first argument of [exp] must be [datetime or numeric], found value [arg0] type [" + badArgType.typeName() + "]"); - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java new file mode 100644 index 0000000000000..23122863b95f3 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java @@ -0,0 +1,151 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Rounding; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.LongSupplier; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class BucketTests extends AbstractFunctionTestCase { + public BucketTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + dateCases(suppliers, "fixed date", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z")); + numberCases(suppliers, "fixed long", DataTypes.LONG, () -> 100L); + numberCases(suppliers, "fixed int", DataTypes.INTEGER, () -> 100); + numberCases(suppliers, "fixed double", DataTypes.DOUBLE, () -> 100.0); + // TODO make errorsForCasesWithoutExamples do something sensible for 4+ parameters + return parameterSuppliersFromTypedData( + anyNullIsNull( + suppliers, + (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL + ? DataTypes.NULL + : original.expectedType(), + (nullPosition, original) -> nullPosition == 0 ? 
original : equalTo("LiteralsEvaluator[lit=null]") + ) + ); + } + + // TODO once we cast above the functions we can drop these + private static final DataType[] DATE_BOUNDS_TYPE = new DataType[] { DataTypes.DATETIME, DataTypes.KEYWORD, DataTypes.TEXT }; + + private static void dateCases(List suppliers, String name, LongSupplier date) { + for (DataType fromType : DATE_BOUNDS_TYPE) { + for (DataType toType : DATE_BOUNDS_TYPE) { + suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, DataTypes.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataTypes.DATETIME, "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral()); + args.add(dateBound("from", fromType, "2023-02-01T00:00:00.00Z")); + args.add(dateBound("to", toType, "2023-03-01T09:00:00.00Z")); + return new TestCaseSupplier.TestCase( + args, + "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", + DataTypes.DATETIME, + dateResultsMatcher(args) + ); + })); + } + } + } + + private static TestCaseSupplier.TypedData dateBound(String name, DataType type, String date) { + Object value; + if (type == DataTypes.DATETIME) { + value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date); + } else { + value = new BytesRef(date); + } + return new TestCaseSupplier.TypedData(value, type, name).forceLiteral(); + } + + private static final DataType[] NUMBER_BOUNDS_TYPES = new DataType[] { DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE }; + + private static void numberCases(List suppliers, String name, DataType numberType, Supplier number) { + for (DataType fromType : NUMBER_BOUNDS_TYPES) { + for (DataType toType : NUMBER_BOUNDS_TYPES) { + suppliers.add(new TestCaseSupplier(name, List.of(numberType, DataTypes.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(number.get(), "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral()); + args.add(numericBound("from", fromType, 0.0)); + args.add(numericBound("to", toType, 1000.0)); + // TODO more number types for "from" and "to" + String attr = "Attribute[channel=0]"; + if (numberType == DataTypes.INTEGER) { + attr = "CastIntToDoubleEvaluator[v=" + attr + "]"; + } else if (numberType == DataTypes.LONG) { + attr = "CastLongToDoubleEvaluator[v=" + attr + "]"; + } + return new TestCaseSupplier.TestCase( + args, + "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=" + + attr + + ", " + + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", + DataTypes.DOUBLE, + dateResultsMatcher(args) + ); + })); + } + } + } + + private static TestCaseSupplier.TypedData numericBound(String name, DataType type, double value) { + Number v; + if (type == DataTypes.INTEGER) { + v = (int) value; + } else if (type == DataTypes.LONG) { + v = (long) value; + } else { + v = value; + } + return new TestCaseSupplier.TypedData(v, type, name).forceLiteral(); + } + + private static Matcher dateResultsMatcher(List typedData) { + if (typedData.get(0).type() == DataTypes.DATETIME) { + long millis = ((Number) typedData.get(0).data()).longValue(); + return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); + } + return equalTo(((Number) 
typedData.get(0).data()).doubleValue()); + } + + @Override + protected Expression build(Source source, List args) { + return new Bucket(source, args.get(0), args.get(1), args.get(2), args.get(3)); + } + + @Override + public void testSimpleWithNulls() { + assumeFalse("we test nulls in parameters", true); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java index b95f05039630a..a7f4ca0342782 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -11,22 +11,21 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; +import java.util.Locale; +import java.util.function.Function; import java.util.function.Supplier; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; /** @@ -37,192 +36,175 @@ public LocateTests(@Name("TestCase") Supplier testCas this.testCase = testCaseSupplier.get(); } + private static final DataType[] STRING_TYPES = new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT }; + @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.add( - supplier( - "keywords", - DataTypes.KEYWORD, - DataTypes.KEYWORD, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - suppliers.add( - supplier( - "mixed keyword, text", - DataTypes.KEYWORD, - DataTypes.TEXT, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - suppliers.add( - supplier( - "texts", - DataTypes.TEXT, - DataTypes.TEXT, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - suppliers.add( - supplier( - "mixed text, keyword", - DataTypes.TEXT, - DataTypes.KEYWORD, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); - } - - public void testToString() { - assertThat( - evaluator( - new Locate( - Source.EMPTY, - field("str", DataTypes.KEYWORD), - field("substr", DataTypes.KEYWORD), - field("start", DataTypes.INTEGER) - ) - ).get(driverContext()).toString(), - equalTo("LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]") - ); - } - - @Override - protected 
Expression build(Source source, List args) { - return new Locate(source, args.get(0), args.get(1), args.size() < 3 ? null : args.get(2)); - } - - public void testPrefixString() { - assertThat(process("a tiger", "a t", 0), equalTo(1)); - assertThat(process("a tiger", "a", 0), equalTo(1)); - assertThat(process("界世", "界", 0), equalTo(1)); - } - - public void testSuffixString() { - assertThat(process("a tiger", "er", 0), equalTo(6)); - assertThat(process("a tiger", "r", 0), equalTo(7)); - assertThat(process("世界", "界", 0), equalTo(2)); - } - - public void testMidString() { - assertThat(process("a tiger", "ti", 0), equalTo(3)); - assertThat(process("a tiger", "ige", 0), equalTo(4)); - assertThat(process("世界世", "界", 0), equalTo(2)); - } - - public void testOutOfRange() { - assertThat(process("a tiger", "tigers", 0), equalTo(0)); - assertThat(process("a tiger", "ipa", 0), equalTo(0)); - assertThat(process("世界世", "\uD83C\uDF0D", 0), equalTo(0)); - } - - public void testExactString() { - assertThat(process("a tiger", "a tiger", 0), equalTo(1)); - assertThat(process("tigers", "tigers", 0), equalTo(1)); - assertThat(process("界世", "界世", 0), equalTo(1)); - } + for (DataType strType : STRING_TYPES) { + for (DataType substrType : STRING_TYPES) { + suppliers.add( + supplier( + "", + strType, + substrType, + () -> randomRealisticUnicodeOfCodepointLength(10), + str -> randomRealisticUnicodeOfCodepointLength(2), + null, + (str, substr, start) -> 1 + str.indexOf(substr) + ) + ); + suppliers.add( + supplier( + "exact match ", + strType, + substrType, + () -> randomRealisticUnicodeOfCodepointLength(10), + str -> str, + null, + (str, substr, start) -> 1 + ) + ); + suppliers.add( + supplier( + "", + strType, + substrType, + () -> randomRealisticUnicodeOfCodepointLength(10), + str -> randomRealisticUnicodeOfCodepointLength(2), + () -> between(0, 3), + (str, substr, start) -> 1 + str.indexOf(substr, start) + ) + ); + } + } - public void testSupplementaryCharacter() { + suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers)); + + // Here follows some non-randomized examples that we want to cover on every run + suppliers.add(supplier("a tiger", "a t", null, 1)); + suppliers.add(supplier("a tiger", "a", null, 1)); + suppliers.add(supplier("界世", "界", null, 1)); + suppliers.add(supplier("a tiger", "er", null, 6)); + suppliers.add(supplier("a tiger", "r", null, 7)); + suppliers.add(supplier("界世", "世", null, 2)); + suppliers.add(supplier("a tiger", "ti", null, 3)); + suppliers.add(supplier("a tiger", "ige", null, 4)); + suppliers.add(supplier("世界世", "界", null, 2)); + suppliers.add(supplier("a tiger", "tigers", null, 0)); + suppliers.add(supplier("a tiger", "ipa", null, 0)); + suppliers.add(supplier("世界世", "\uD83C\uDF0D", null, 0)); + + // Extra assertions about 4-byte characters // some assertions about the supplementary (4-byte) character we'll use for testing assert "𠜎".length() == 2; assert "𠜎".codePointCount(0, 2) == 1; - assert "𠜎".getBytes(UTF_8).length == 4; - - assertThat(process("a ti𠜎er", "𠜎er", 0), equalTo(5)); - assertThat(process("a ti𠜎er", "i𠜎e", 0), equalTo(4)); - assertThat(process("a ti𠜎er", "ti𠜎", 0), equalTo(3)); - assertThat(process("a ti𠜎er", "er", 0), equalTo(6)); - assertThat(process("a ti𠜎er", "r", 0), equalTo(7)); - - assertThat(process("𠜎a ti𠜎er", "𠜎er", 0), equalTo(6)); - assertThat(process("𠜎a ti𠜎er", "i𠜎e", 0), equalTo(5)); - assertThat(process("𠜎a ti𠜎er", "ti𠜎", 0), equalTo(4)); - assertThat(process("𠜎a ti𠜎er", "er", 0), equalTo(7)); - assertThat(process("𠜎a ti𠜎er", "r", 0), 
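The fixed examples pin down LOCATE's contract: positions are 1-based, 0 means the substring is absent, and the start offset is optional. A minimal sketch of the expectation the randomized suppliers compute (this mirrors the `1 + str.indexOf(substr, start)` lambdas above; for BMP-only strings, char and code-point positions coincide):

```java
// 1-based position of substr in str, 0 when absent; start is optional.
static int expectedLocate(String str, String substr, Integer start) {
    int idx = start == null ? str.indexOf(substr) : str.indexOf(substr, start);
    return 1 + idx; // indexOf returns -1 when absent, so absent maps to 0
}

// Consistent with the fixed cases above:
// expectedLocate("a tiger", "ti", null)  == 3
// expectedLocate("a tiger", "ipa", null) == 0
```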
equalTo(8)); - - // exact - assertThat(process("a ti𠜎er", "a ti𠜎er", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎abc", 0), equalTo(1)); - assertThat(process(" 𠜎𠜎𠜎abc", " 𠜎𠜎𠜎abc", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎 abc ", "𠜎𠜎𠜎 abc ", 0), equalTo(1)); - + assert "𠜎".getBytes(StandardCharsets.UTF_8).length == 4; + suppliers.add(supplier("a ti𠜎er", "𠜎er", null, 5)); + suppliers.add(supplier("a ti𠜎er", "i𠜎e", null, 4)); + suppliers.add(supplier("a ti𠜎er", "ti𠜎", null, 3)); + suppliers.add(supplier("a ti𠜎er", "er", null, 6)); + suppliers.add(supplier("a ti𠜎er", "r", null, 7)); + suppliers.add(supplier("a ti𠜎er", "a ti𠜎er", null, 1)); // prefix - assertThat(process("𠜎abc", "𠜎", 0), equalTo(1)); - assertThat(process("𠜎 abc", "𠜎 ", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎 abc", "𠜎𠜎𠜎 ", 0), equalTo(1)); - assertThat(process(" 𠜎𠜎𠜎 abc", " 𠜎𠜎𠜎 ", 0), equalTo(1)); - assertThat(process("𠜎 𠜎 𠜎 abc", "𠜎 𠜎 𠜎 ", 0), equalTo(1)); - + suppliers.add(supplier("𠜎abc", "𠜎", null, 1)); + suppliers.add(supplier("𠜎 abc", "𠜎 ", null, 1)); + suppliers.add(supplier("𠜎𠜎𠜎abc", "𠜎𠜎𠜎", null, 1)); + suppliers.add(supplier("𠜎𠜎𠜎 abc", "𠜎𠜎𠜎 ", null, 1)); + suppliers.add(supplier(" 𠜎𠜎𠜎 abc", " 𠜎𠜎𠜎 ", null, 1)); + suppliers.add(supplier("𠜎 𠜎 𠜎 abc", "𠜎 𠜎 𠜎 ", null, 1)); // suffix - assertThat(process("abc𠜎", "𠜎", 0), equalTo(4)); - assertThat(process("abc 𠜎", " 𠜎", 0), equalTo(4)); - assertThat(process("abc𠜎𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(4)); - assertThat(process("abc 𠜎𠜎𠜎", " 𠜎𠜎𠜎", 0), equalTo(4)); - assertThat(process("abc𠜎𠜎𠜎 ", "𠜎𠜎𠜎 ", 0), equalTo(4)); - + suppliers.add(supplier("abc𠜎", "𠜎", null, 4)); + suppliers.add(supplier("abc 𠜎", " 𠜎", null, 4)); + suppliers.add(supplier("abc𠜎𠜎𠜎", "𠜎𠜎𠜎", null, 4)); + suppliers.add(supplier("abc 𠜎𠜎𠜎", " 𠜎𠜎𠜎", null, 4)); + suppliers.add(supplier("abc𠜎𠜎𠜎 ", "𠜎𠜎𠜎 ", null, 4)); // out of range - assertThat(process("𠜎a ti𠜎er", "𠜎a ti𠜎ers", 0), equalTo(0)); - assertThat(process("a ti𠜎er", "aa ti𠜎er", 0), equalTo(0)); - assertThat(process("abc𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(0)); + suppliers.add(supplier("𠜎a ti𠜎er", "𠜎a ti𠜎ers", null, 0)); + suppliers.add(supplier("a ti𠜎er", "aa ti𠜎er", null, 0)); + suppliers.add(supplier("abc𠜎𠜎", "𠜎𠜎𠜎", null, 0)); assert "🐱".length() == 2 && "🐶".length() == 2; assert "🐱".codePointCount(0, 2) == 1 && "🐶".codePointCount(0, 2) == 1; - assert "🐱".getBytes(UTF_8).length == 4 && "🐶".getBytes(UTF_8).length == 4; - assertThat(process("🐱Meow!🐶Woof!", "🐱Meow!🐶Woof!", 0), equalTo(1)); - assertThat(process("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0), equalTo(2)); - assertThat(process("🐱Meow!🐶Woof!", "eow!🐶Woof!", 0), equalTo(3)); + assert "🐱".getBytes(StandardCharsets.UTF_8).length == 4 && "🐶".getBytes(StandardCharsets.UTF_8).length == 4; + suppliers.add(supplier("🐱Meow!🐶Woof!", "🐱Meow!🐶Woof!", null, 1)); + suppliers.add(supplier("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0, 2)); + suppliers.add(supplier("🐱Meow!🐶Woof!", "eow!🐶Woof!", 0, 3)); + + return parameterSuppliersFromTypedData(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new Locate(source, args.get(0), args.get(1), args.size() < 3 ? 
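The supplementary-character cases encode an important detail: 𠜎 is one code point but two Java chars (and four UTF-8 bytes), and the expected positions show that LOCATE counts code points, not chars; locating "er" in "a ti𠜎er" yields 6, where a char-based index would give 7. A sketch of that expectation math (the helper name is ours, purely for illustration):

```java
// 1-based position counted in Unicode code points, 0 when absent,
// consistent with the supplementary-character cases above.
static int locateByCodePoint(String str, String substr) {
    int charIdx = str.indexOf(substr);
    return charIdx < 0 ? 0 : 1 + str.codePointCount(0, charIdx);
}

// locateByCodePoint("a ti𠜎er", "er") == 6  ("𠜎" occupies chars 4-5 but is code point 5)
// locateByCodePoint("abc𠜎", "𠜎")    == 4
```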
null : args.get(2)); } - private Integer process(String str, String substr, Integer start) { - try ( - EvalOperator.ExpressionEvaluator eval = evaluator( - new Locate( - Source.EMPTY, - field("str", DataTypes.KEYWORD), - field("substr", DataTypes.KEYWORD), - new Literal(Source.EMPTY, start, DataTypes.INTEGER) - ) - ).get(driverContext()); - Block block = eval.eval(row(List.of(new BytesRef(str), new BytesRef(substr)))) - ) { - return block.isNull(0) ? Integer.valueOf(0) : ((Integer) toJavaObject(block, 0)); + private static TestCaseSupplier supplier(String str, String substr, @Nullable Integer start, @Nullable Integer expectedValue) { + String name = String.format(Locale.ROOT, "\"%s\" in \"%s\"", substr, str); + if (start != null) { + name += " starting at " + start; } + + return new TestCaseSupplier( + name, + types(DataTypes.KEYWORD, DataTypes.KEYWORD, start != null), + () -> testCase(DataTypes.KEYWORD, DataTypes.KEYWORD, str, substr, start, expectedValue) + ); + } + + interface ExpectedValue { + int expectedValue(String str, String substr, Integer start); } private static TestCaseSupplier supplier( String name, - DataType firstType, - DataType secondType, + DataType strType, + DataType substrType, Supplier strValueSupplier, - Supplier substrValueSupplier, - Supplier startSupplier + Function substrValueSupplier, + @Nullable Supplier startSupplier, + ExpectedValue expectedValue ) { - return new TestCaseSupplier(name, List.of(firstType, secondType), () -> { - List values = new ArrayList<>(); - String expectedToString = "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]"; - - String value = strValueSupplier.get(); - values.add(new TestCaseSupplier.TypedData(new BytesRef(value), firstType, "0")); + List types = types(strType, substrType, startSupplier != null); + return new TestCaseSupplier(name + TestCaseSupplier.nameFromTypes(types), types, () -> { + String str = strValueSupplier.get(); + String substr = substrValueSupplier.apply(str); + Integer start = startSupplier == null ? null : startSupplier.get(); + return testCase(strType, substrType, str, substr, start, expectedValue.expectedValue(str, substr, start)); + }); + } - String substrValue = substrValueSupplier.get(); - values.add(new TestCaseSupplier.TypedData(new BytesRef(substrValue), secondType, "1")); + private static String expectedToString(boolean hasStart) { + if (hasStart) { + return "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]"; + } + return "LocateNoStartEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1]]"; + } - Integer startValue = startSupplier.get(); - values.add(new TestCaseSupplier.TypedData(startValue, DataTypes.INTEGER, "2")); + private static List types(DataType firstType, DataType secondType, boolean hasStart) { + List types = new ArrayList<>(); + types.add(firstType); + types.add(secondType); + if (hasStart) { + types.add(DataTypes.INTEGER); + } + return types; + } - int expectedValue = 1 + value.indexOf(substrValue); - return new TestCaseSupplier.TestCase(values, expectedToString, DataTypes.INTEGER, equalTo(expectedValue)); - }); + private static TestCaseSupplier.TestCase testCase( + DataType strType, + DataType substrType, + String str, + String substr, + Integer start, + Integer expectedValue + ) { + List values = new ArrayList<>(); + values.add(new TestCaseSupplier.TypedData(str == null ? null : new BytesRef(str), strType, "str")); + values.add(new TestCaseSupplier.TypedData(substr == null ? 
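One subtlety in the helpers: whether the start argument is present now selects a different evaluator, so the expected toString is derived from the type list rather than hard-coded. Roughly (evaluator names taken from expectedToString above):

```java
// start == null: two channels, planned as
//   "LocateNoStartEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1]]"
// start != null: three channels, planned as
//   "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]"
String expected = expectedToString(start != null);
```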
null : new BytesRef(substr), substrType, "substr")); + if (start != null) { + values.add(new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start")); + } + return new TestCaseSupplier.TestCase(values, expectedToString(start != null), DataTypes.INTEGER, equalTo(expectedValue)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index eb3901f37b99a..7de3308fcab16 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -149,6 +149,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -156,6 +157,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -3297,26 +3299,26 @@ public void testNestedExpressionsInStatsWithExpression() { assertThat(Expressions.names(fields), contains("languages + emp_no")); } - public void testLogicalPlanOptimizerVerifier() { + public void testBucketAcceptsEvalLiteralReferences() { var plan = plan(""" from test | eval bucket_start = 1, bucket_end = 100000 - | eval auto_bucket(salary, 10, bucket_start, bucket_end) + | eval bucket(salary, 10, bucket_start, bucket_end) """); var ab = as(plan, Eval.class); assertTrue(ab.optimized()); } - public void testLogicalPlanOptimizerVerificationException() { + public void testBucketFailsOnFieldArgument() { VerificationException e = expectThrows(VerificationException.class, () -> plan(""" from test | eval bucket_end = 100000 - | eval auto_bucket(salary, 10, emp_no, bucket_end) + | eval bucket(salary, 10, emp_no, bucket_end) """)); assertTrue(e.getMessage().startsWith("Found ")); final String header = "Found 1 problem\nline "; assertEquals( - "3:32: third argument of [auto_bucket(salary, 10, emp_no, bucket_end)] must be a constant, received [emp_no]", + "3:27: third argument of [bucket(salary, 10, emp_no, bucket_end)] must be a constant, received [emp_no]", e.getMessage().substring(header.length()) ); } @@ -3832,12 +3834,11 @@ public void testNoWrongIsNotNullPruning() { * * For DISSECT expects the following; the others are similar. 
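The renamed verifier tests capture BUCKET's contract that the from/to bounds must fold to constants: references that resolve to EVAL literals are accepted, field references are rejected. Note also that the reported error column moved from 3:32 to 3:27 simply because `bucket` is five characters shorter than `auto_bucket`. The two shapes, written as Java text blocks like the tests use (queries taken from the tests above):

```java
// Accepted: the bounds are literals threaded through EVAL, still foldable.
String ok = """
    from test
    | eval bucket_start = 1, bucket_end = 100000
    | eval bucket(salary, 10, bucket_start, bucket_end)
    """;

// Rejected with a VerificationException, because emp_no is a field:
// "... must be a constant, received [emp_no]"
String bad = """
    from test
    | eval bucket_end = 100000
    | eval bucket(salary, 10, emp_no, bucket_end)
    """;
```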
* - * EsqlProject[[first_name{f}#37, emp_no{r}#33, salary{r}#34]] - * \_TopN[[Order[$$emp_no$temp_name$36{r}#46 + $$salary$temp_name$41{r}#47 * 13[INTEGER],ASC,LAST], Order[NEG($$salary$t - * emp_name$41{r}#47),DESC,FIRST]],3[INTEGER]] - * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, parser=org.elasticsearch.dissect.Dissect - * Parser@b6858b],[emp_no{r}#33, salary{r}#34]] - * \_Eval[[emp_no{f}#36 AS $$emp_no$temp_name$36, salary{f}#41 AS $$salary$temp_name$41]] + * Project[[first_name{f}#37, emp_no{r}#30, salary{r}#31]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#46,ASC,LAST], Order[$$order_by$temp_name$1{r}#47,DESC,FIRST]],3[INTEGER]] + * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, + * parser=org.elasticsearch.dissect.DissectParser@87f460f],[emp_no{r}#30, salary{r}#31]] + * \_Eval[[emp_no{f}#36 + salary{f}#41 * 13[INTEGER] AS $$order_by$temp_name$0, NEG(salary{f}#41) AS $$order_by$temp_name$1]] * \_EsRelation[test][_meta_field{f}#42, emp_no{f}#36, first_name{f}#37, ..] */ public void testPushdownWithOverwrittenName() { @@ -3850,7 +3851,7 @@ public void testPushdownWithOverwrittenName() { String queryTemplateKeepAfter = """ FROM test - | SORT 13*(emp_no+salary) ASC, -salary DESC + | SORT emp_no ASC nulls first, salary DESC nulls last, emp_no | {} | KEEP first_name, emp_no, salary | LIMIT 3 @@ -3859,7 +3860,7 @@ public void testPushdownWithOverwrittenName() { String queryTemplateKeepFirst = """ FROM test | KEEP emp_no, salary, first_name - | SORT 13*(emp_no+salary) ASC, -salary DESC + | SORT emp_no ASC nulls first, salary DESC nulls last, emp_no | {} | LIMIT 3 """; @@ -3876,20 +3877,27 @@ public void testPushdownWithOverwrittenName() { assertThat(projections.get(2).name(), equalTo("salary")); var topN = as(project.child(), TopN.class); - assertThat(topN.order().size(), is(2)); + assertThat(topN.order().size(), is(3)); - var firstOrderExpr = as(topN.order().get(0), Order.class); - var mul = as(firstOrderExpr.child(), Mul.class); - var add = as(mul.left(), Add.class); - var renamed_emp_no = as(add.left(), ReferenceAttribute.class); - var renamed_salary = as(add.right(), ReferenceAttribute.class); + var firstOrder = as(topN.order().get(0), Order.class); + assertThat(firstOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(firstOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + var renamed_emp_no = as(firstOrder.child(), ReferenceAttribute.class); assertThat(renamed_emp_no.toString(), startsWith("$$emp_no$temp_name")); + + var secondOrder = as(topN.order().get(1), Order.class); + assertThat(secondOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(secondOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamed_salary = as(secondOrder.child(), ReferenceAttribute.class); assertThat(renamed_salary.toString(), startsWith("$$salary$temp_name")); - var secondOrderExpr = as(topN.order().get(1), Order.class); - var neg = as(secondOrderExpr.child(), Neg.class); - var renamed_salary2 = as(neg.field(), ReferenceAttribute.class); - assert (renamed_salary2.semanticEquals(renamed_salary) && renamed_salary2.equals(renamed_salary)); + var thirdOrder = as(topN.order().get(2), Order.class); + assertThat(thirdOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + 
assertThat(thirdOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamed_emp_no2 = as(thirdOrder.child(), ReferenceAttribute.class); + assertThat(renamed_emp_no2.toString(), startsWith("$$emp_no$temp_name")); + + assert (renamed_emp_no2.semanticEquals(renamed_emp_no) && renamed_emp_no2.equals(renamed_emp_no)); Eval renamingEval = null; if (overwritingCommand.startsWith("EVAL")) { @@ -3913,8 +3921,210 @@ public void testPushdownWithOverwrittenName() { for (Alias field : renamingEval.fields()) { attributesCreatedInEval.add(field.toAttribute()); } - assert (attributesCreatedInEval.contains(renamed_emp_no)); - assert (attributesCreatedInEval.contains(renamed_salary)); + assertThat(attributesCreatedInEval, allOf(hasItem(renamed_emp_no), hasItem(renamed_salary), hasItem(renamed_emp_no2))); + + assertThat(renamingEval.fields().size(), anyOf(equalTo(2), equalTo(4))); // 4 for EVAL, 2 for the other overwritingCommands + // emp_no ASC nulls first + Alias empNoAsc = renamingEval.fields().get(0); + assertThat(empNoAsc.toAttribute(), equalTo(renamed_emp_no)); + var emp_no = as(empNoAsc.child(), FieldAttribute.class); + assertThat(emp_no.name(), equalTo("emp_no")); + + // salary DESC nulls last + Alias salaryDesc = renamingEval.fields().get(1); + assertThat(salaryDesc.toAttribute(), equalTo(renamed_salary)); + var salary_desc = as(salaryDesc.child(), FieldAttribute.class); + assertThat(salary_desc.name(), equalTo("salary")); + + assertThat(renamingEval.child(), instanceOf(EsRelation.class)); + } + } + + /** + * Expects + * Project[[min{r}#4, languages{f}#11]] + * \_TopN[[Order[$$order_by$0$0{r}#18,ASC,LAST]],1000[INTEGER]] + * \_Eval[[min{r}#4 + languages{f}#11 AS $$order_by$0$0]] + * \_Aggregate[[languages{f}#11],[MIN(salary{f}#13) AS min, languages{f}#11]] + * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + */ + public void testReplaceSortByExpressionsWithStats() { + var plan = optimizedPlan(""" + from test + | stats min = min(salary) by languages + | sort min + languages + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("min", "languages")); + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(1)); + + var order = as(topN.order().get(0), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var expression = as(order.child(), ReferenceAttribute.class); + assertThat(expression.toString(), startsWith("$$order_by$0$")); + + var eval = as(topN.child(), Eval.class); + var fields = eval.fields(); + assertThat(Expressions.attribute(fields.get(0)), is(Expressions.attribute(expression))); + var aggregate = as(eval.child(), Aggregate.class); + var aggregates = aggregate.aggregates(); + assertThat(Expressions.names(aggregates), contains("min", "languages")); + var unwrapped = Alias.unwrap(aggregates.get(0)); + var min = as(unwrapped, Min.class); + as(aggregate.child(), EsRelation.class); + } + + /** + * Expects + * + * Project[[salary{f}#19, languages{f}#17, emp_no{f}#14]] + * \_TopN[[Order[$$order_by$0$0{r}#24,ASC,LAST], Order[emp_no{f}#14,DESC,FIRST]],1000[INTEGER]] + * \_Eval[[salary{f}#19 / 10000[INTEGER] + languages{f}#17 AS $$order_by$0$0]] + * \_EsRelation[test][_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, ..]
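testReplaceSortByExpressionsWithStats above exercises the new rewrite rule: a SORT over an arbitrary expression becomes an Eval that materializes the expression under a generated $$order_by$... name, a TopN over that reference, and a final Project that drops the synthetic column. Conceptually (a sketch of the equivalent query shape, not literal optimizer output):

```java
String query = """
    from test
    | stats min = min(salary) by languages
    | sort min + languages
    """;
// is planned roughly as if the user had written:
//   | stats min = min(salary) by languages
//   | eval $$order_by$0$0 = min + languages
//   | sort $$order_by$0$0
//   | keep min, languages   // the synthetic column never escapes the plan
```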
+ */ + public void testReplaceSortByExpressionsMultipleSorts() { + var plan = optimizedPlan(""" + from test + | sort salary/10000 + languages, emp_no desc + | eval d = emp_no + | sort salary/10000 + languages, d desc + | keep salary, languages, emp_no + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("salary", "languages", "emp_no")); + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(2)); + + var order = as(topN.order().get(0), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + ReferenceAttribute expression = as(order.child(), ReferenceAttribute.class); + assertThat(expression.toString(), startsWith("$$order_by$0$")); + + order = as(topN.order().get(1), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + FieldAttribute empNo = as(order.child(), FieldAttribute.class); + assertThat(empNo.name(), equalTo("emp_no")); + + var eval = as(topN.child(), Eval.class); + var fields = eval.fields(); + assertThat(fields.size(), equalTo(1)); + assertThat(Expressions.attribute(fields.get(0)), is(Expressions.attribute(expression))); + Alias salaryAddLanguages = eval.fields().get(0); + var add = as(salaryAddLanguages.child(), Add.class); + var div = as(add.left(), Div.class); + var salary = as(div.left(), FieldAttribute.class); + assertThat(salary.name(), equalTo("salary")); + var _10000 = as(div.right(), Literal.class); + assertThat(_10000.value(), equalTo(10000)); + var languages = as(add.right(), FieldAttribute.class); + assertThat(languages.name(), equalTo("languages")); + + as(eval.child(), EsRelation.class); + } + + /** + * For DISSECT expects the following; the others are similar. + * + * Project[[first_name{f}#37, emp_no{r}#30, salary{r}#31]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#46,ASC,LAST], Order[$$order_by$temp_name$1{r}#47,DESC,FIRST]],3[INTEGER]] + * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, + * parser=org.elasticsearch.dissect.DissectParser@87f460f],[emp_no{r}#30, salary{r}#31]] + * \_Eval[[emp_no{f}#36 + salary{f}#41 * 13[INTEGER] AS $$order_by$temp_name$0, NEG(salary{f}#41) AS $$order_by$temp_name$1]] + * \_EsRelation[test][_meta_field{f}#42, emp_no{f}#36, first_name{f}#37, ..] + */ + public void testReplaceSortByExpressions() { + List overwritingCommands = List.of( + "EVAL emp_no = 3*emp_no, salary = -2*emp_no-salary", + "DISSECT first_name \"%{emp_no} %{salary}\"", + "GROK first_name \"%{WORD:emp_no} %{WORD:salary}\"", + "ENRICH languages_idx ON first_name WITH emp_no = language_code, salary = language_code" + ); + + String queryTemplateKeepAfter = """ + FROM test + | SORT 13*(emp_no+salary) ASC, -salary DESC + | {} + | KEEP first_name, emp_no, salary + | LIMIT 3 + """; + // Equivalent but with KEEP first - ensures that attributes in the final projection are correct after pushdown rules were applied. + String queryTemplateKeepFirst = """ + FROM test + | KEEP emp_no, salary, first_name + | SORT 13*(emp_no+salary) ASC, -salary DESC + | {} + | LIMIT 3 + """; + + for (String overwritingCommand : overwritingCommands) { + String queryTemplate = randomBoolean() ? 
queryTemplateKeepFirst : queryTemplateKeepAfter; + var plan = optimizedPlan(LoggerMessageFormat.format(null, queryTemplate, overwritingCommand)); + + var project = as(plan, Project.class); + var projections = project.projections(); + assertThat(projections.size(), equalTo(3)); + assertThat(projections.get(0).name(), equalTo("first_name")); + assertThat(projections.get(1).name(), equalTo("emp_no")); + assertThat(projections.get(2).name(), equalTo("salary")); + + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(2)); + + var firstOrderExpr = as(topN.order().get(0), Order.class); + assertThat(firstOrderExpr.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(firstOrderExpr.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamedEmpNoSalaryExpression = as(firstOrderExpr.child(), ReferenceAttribute.class); + assertThat(renamedEmpNoSalaryExpression.toString(), startsWith("$$order_by$0$")); + + var secondOrderExpr = as(topN.order().get(1), Order.class); + assertThat(secondOrderExpr.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(secondOrderExpr.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + var renamedNegatedSalaryExpression = as(secondOrderExpr.child(), ReferenceAttribute.class); + assertThat(renamedNegatedSalaryExpression.toString(), startsWith("$$order_by$1$")); + + Eval renamingEval = null; + if (overwritingCommand.startsWith("EVAL")) { + // Multiple EVALs should be merged, so there's only one. + renamingEval = as(topN.child(), Eval.class); + } + if (overwritingCommand.startsWith("DISSECT")) { + var dissect = as(topN.child(), Dissect.class); + renamingEval = as(dissect.child(), Eval.class); + } + if (overwritingCommand.startsWith("GROK")) { + var grok = as(topN.child(), Grok.class); + renamingEval = as(grok.child(), Eval.class); + } + if (overwritingCommand.startsWith("ENRICH")) { + var enrich = as(topN.child(), Enrich.class); + renamingEval = as(enrich.child(), Eval.class); + } + + assertThat(renamingEval.fields().size(), anyOf(equalTo(2), equalTo(4))); // 4 for EVAL, 2 for the other overwritingCommands + + // 13*(emp_no+salary) + Alias _13empNoSalary = renamingEval.fields().get(0); + assertThat(_13empNoSalary.toAttribute(), equalTo(renamedEmpNoSalaryExpression)); + var mul = as(_13empNoSalary.child(), Mul.class); + var add = as(mul.left(), Add.class); + var emp_no = as(add.left(), FieldAttribute.class); + assertThat(emp_no.name(), equalTo("emp_no")); + var salary = as(add.right(), FieldAttribute.class); + assertThat(salary.name(), equalTo("salary")); + var _13 = as(mul.right(), Literal.class); + assertThat(_13.value(), equalTo(13)); + + // -salary + Alias negatedSalary = renamingEval.fields().get(1); + assertThat(negatedSalary.toAttribute(), equalTo(renamedNegatedSalaryExpression)); + var neg = as(negatedSalary.child(), Neg.class); + assertThat(neg.field(), equalTo(salary)); assertThat(renamingEval.child(), instanceOf(EsRelation.class)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 17ad5eb8b9f3d..ff6c60310fd87 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -212,11 +212,11 @@ public void testIn2() { | limit 4""", Set.of("hire_date", "hire_date.*", "birth_date", "birth_date.*")); } - public void testAutoBucketMonth() { + public void testBucketMonth() { assertFieldNames(""" from employees | where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" - | eval hd = auto_bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") + | eval hd = bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | sort hire_date | keep hire_date, hd""", Set.of("hire_date", "hire_date.*")); } @@ -228,11 +228,11 @@ public void testBorn_before_today() { ); } - public void testAutoBucketMonthInAgg() { + public void testBucketMonthInAgg() { assertFieldNames(""" FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" - | EVAL bucket = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") + | EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS AVG(salary) BY bucket | SORT bucket""", Set.of("salary", "salary.*", "hire_date", "hire_date.*")); } @@ -554,11 +554,11 @@ public void testConvertFromDatetime() { ); } - public void testAutoBucket() { + public void testBucket() { assertFieldNames(""" FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" - | EVAL bh = auto_bucket(height, 20, 1.41, 2.10) + | EVAL bh = bucket(height, 20, 1.41, 2.10) | SORT hire_date | KEEP hire_date, height, bh""", Set.of("hire_date", "hire_date.*", "height", "height.*")); } diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index e0ce1f92b2a37..d30c249813cd2 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -362,9 +362,10 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext valuesSourceType, (dv, n) -> { throw new UnsupportedOperationException(); - } + }, + isIndexed() ).build(cache, breakerService); - return new UnsignedLongIndexFieldData(signedLongValues, UnsignedLongDocValuesField::new); + return new UnsignedLongIndexFieldData(signedLongValues, UnsignedLongDocValuesField::new, isIndexed()); }; } diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java index 0a312933768fb..2f936531f8c72 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java @@ -17,13 +17,16 @@ public class UnsignedLongIndexFieldData extends IndexNumericFieldData { private final IndexNumericFieldData signedLongIFD; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; UnsignedLongIndexFieldData( IndexNumericFieldData signedLongFieldData, - ToScriptFieldFactory toScriptFieldFactory + 
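The unsigned_long changes above thread the mapper's isIndexed() flag into the fielddata, so downstream consumers can ask the fielddata directly whether index structures exist for the field (useful, for example, when deciding whether a sort may consult the index; that motivation is our reading, not stated in the patch). A minimal sketch of the new construction, with names from the diff and the surrounding builder context assumed:

```java
// The mapper now passes isIndexed() down; UnsignedLongIndexFieldData stores it
// and answers isIndexed() without consulting the mappings again.
return new UnsignedLongIndexFieldData(
    signedLongValues,                 // delegate over the signed-long doc values
    UnsignedLongDocValuesField::new,  // script field factory
    isIndexed()                       // new: whether the field is indexed
);
```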
ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.signedLongIFD = signedLongFieldData; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override @@ -51,6 +54,11 @@ protected boolean sortRequiresCustomComparator() { return false; } + @Override + protected boolean isIndexed() { + return indexed; + } + @Override public NumericType getNumericType() { return NumericType.LONG; diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java index 20ca6d8847d79..4fe3ed61114c3 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.threadpool.ThreadPool; @@ -35,7 +34,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class MlDailyMaintenanceServiceIT extends MlNativeAutodetectIntegTestCase { @@ -46,7 +44,6 @@ public class MlDailyMaintenanceServiceIT extends MlNativeAutodetectIntegTestCase public void setUpMocks() { jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); threadPool = mock(ThreadPool.class); - when(threadPool.executor(ThreadPool.Names.SAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); } public void testTriggerDeleteJobsInStateDeletingWithoutDeletionTask() throws InterruptedException { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java index 0a7cee96df145..30f84a97bcfb0 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java @@ -39,13 +39,11 @@ public class MlInitializationServiceIT extends MlNativeAutodetectIntegTestCase { - private ThreadPool threadPool; private MlInitializationService mlInitializationService; @Before public void setUpMocks() { - threadPool = mock(ThreadPool.class); - when(threadPool.executor(ThreadPool.Names.SAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); + final var threadPool = mock(ThreadPool.class); when(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); MlDailyMaintenanceService mlDailyMaintenanceService = mock(MlDailyMaintenanceService.class); ClusterService clusterService = mock(ClusterService.class); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java 
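The ML test cleanups above drop the now-unused ThreadPool.Names.SAME stub and mock only the executor the service actually touches. The pattern works because EsExecutors.DIRECT_EXECUTOR_SERVICE runs every submitted task inline on the calling thread, which makes the asynchronous service code deterministic in tests (Mockito, as in the tests above):

```java
// assumes: import static org.mockito.Mockito.mock;
//          import static org.mockito.Mockito.when;
ThreadPool threadPool = mock(ThreadPool.class);
// Work scheduled on the utility pool now runs inline on the test thread.
when(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME))
    .thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE);
```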
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index dab2010035b66..c849e69c780bd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -173,7 +174,7 @@ private void makeMlInternalIndicesHidden() { String[] mlHiddenIndexPatterns = MachineLearning.getMlHiddenIndexPatterns(); // Step 5: Handle errors encountered on the way. - ActionListener finalListener = ActionListener.wrap(updateAliasesResponse -> { + ActionListener finalListener = ActionListener.wrap(updateAliasesResponse -> { if (updateAliasesResponse.isAcknowledged() == false) { logger.warn("One or more of the ML internal aliases could not be made hidden."); return; @@ -194,7 +195,7 @@ private void makeMlInternalIndicesHidden() { } if (indicesAliasesRequest.getAliasActions().isEmpty()) { logger.debug("There are no ML internal aliases that need to be made hidden, [{}]", getAliasesResponse.getAliases()); - finalListener.onResponse(AcknowledgedResponse.TRUE); + finalListener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return; } String indicesWithNonHiddenAliasesString = indicesAliasesRequest.getAliasActions() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index 577bbe3dac6ce..b9cc1902b7ab6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -287,7 +288,7 @@ public void deleteJobDocuments( AtomicReference indexNames = new AtomicReference<>(); - final ActionListener completionHandler = ActionListener.wrap( + final ActionListener completionHandler = ActionListener.wrap( response -> finishedHandler.accept(response.isAcknowledged()), failureHandler ); @@ -295,7 +296,7 @@ public void deleteJobDocuments( // Step 9. 
If we did not drop the indices and after DBQ state done, we delete the aliases ActionListener dbqHandler = ActionListener.wrap(bulkByScrollResponse -> { if (bulkByScrollResponse == null) { // no action was taken by DBQ, assume indices were deleted - completionHandler.onResponse(AcknowledgedResponse.TRUE); + completionHandler.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); } else { if (bulkByScrollResponse.isTimedOut()) { logger.warn("[{}] DeleteByQuery for indices [{}] timed out.", jobId, String.join(", ", indexNames.get())); @@ -469,7 +470,7 @@ private void deleteResultsByQuery( executeAsyncWithOrigin(client, ML_ORIGIN, RefreshAction.INSTANCE, refreshRequest, refreshListener); } - private void deleteAliases(@SuppressWarnings("HiddenField") String jobId, ActionListener finishedHandler) { + private void deleteAliases(@SuppressWarnings("HiddenField") String jobId, ActionListener finishedHandler) { final String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); final String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(jobId); @@ -486,7 +487,7 @@ private void deleteAliases(@SuppressWarnings("HiddenField") String jobId, Action if (removeRequest == null) { // don't error if the job's aliases have already been deleted - carry on and delete the // rest of the job's data - finishedHandler.onResponse(AcknowledgedResponse.TRUE); + finishedHandler.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return; } executeAsyncWithOrigin( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 1abb466a20f1a..50342a7bf99e0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -344,7 +345,7 @@ public void createJobResultIndex(Job job, ClusterState state, final ActionListen client.threadPool().getThreadContext(), ML_ORIGIN, request, - ActionListener.wrap(r -> finalListener.onResponse(true), finalListener::onFailure), + ActionListener.wrap(r -> finalListener.onResponse(true), finalListener::onFailure), client.admin().indices()::aliases ); }, finalListener::onFailure); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/CancellationIT.java similarity index 99% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/CancellationIT.java index ef5198499ff09..183ef3786a62d 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java +++ 
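The ML persistence changes all follow one migration: alias updates now complete with the richer IndicesAliasesResponse instead of plain AcknowledgedResponse, and code paths that skip the alias call answer with the canonical constant. The adapted listener shape, as used in JobDataDeleter above:

```java
ActionListener<IndicesAliasesResponse> completionHandler = ActionListener.wrap(
    response -> finishedHandler.accept(response.isAcknowledged()), // only the ack flag is consumed
    failureHandler
);

// Where no alias change is needed, short-circuit with the canonical response:
completionHandler.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS);
```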
b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/CancellationIT.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java similarity index 96% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java index 20519d53459ba..49a5cfa7ca067 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; public class GetFlameGraphActionIT extends ProfilingTestCase { public void testGetStackTracesUnfiltered() throws Exception { diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java similarity index 99% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java index 30de2173e8903..9de148c33c467 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java similarity index 98% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java index f3417dbf5d472..27fe2b8acb79b 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsActionIT.java similarity index 98% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsActionIT.java index 05d0e1cb0471b..ab5bbc3812eb5 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsActionIT.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/LocalStateProfilingXPackPlugin.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/LocalStateProfilingXPackPlugin.java similarity index 89% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/LocalStateProfilingXPackPlugin.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/LocalStateProfilingXPackPlugin.java index 3a033e2686b2b..1953007a6c39a 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/LocalStateProfilingXPackPlugin.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/LocalStateProfilingXPackPlugin.java @@ -5,10 +5,11 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.profiling.ProfilingPlugin; import java.nio.file.Path; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/ProfilingTestCase.java similarity index 97% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/ProfilingTestCase.java index 58b018a13e096..67825f6ce8570 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/ProfilingTestCase.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; @@ -25,6 +25,8 @@ import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import org.elasticsearch.xpack.countedkeyword.CountedKeywordMapperPlugin; import org.elasticsearch.xpack.ilm.IndexLifecycle; +import org.elasticsearch.xpack.profiling.ProfilingPlugin; +import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexManager; import org.elasticsearch.xpack.unsignedlong.UnsignedLongMapperPlugin; import org.elasticsearch.xpack.versionfield.VersionFieldPlugin; import org.junit.After; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java index 0615bef7a4980..6962dedb734ae 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java @@ -35,6 +35,25 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.profiling.action.GetFlamegraphAction; +import org.elasticsearch.xpack.profiling.action.GetStackTracesAction; +import org.elasticsearch.xpack.profiling.action.GetStatusAction; +import org.elasticsearch.xpack.profiling.action.GetTopNFunctionsAction; +import org.elasticsearch.xpack.profiling.action.ProfilingInfoTransportAction; +import org.elasticsearch.xpack.profiling.action.ProfilingLicenseChecker; +import org.elasticsearch.xpack.profiling.action.ProfilingUsageTransportAction; +import org.elasticsearch.xpack.profiling.action.TransportGetFlamegraphAction; +import org.elasticsearch.xpack.profiling.action.TransportGetStackTracesAction; +import org.elasticsearch.xpack.profiling.action.TransportGetStatusAction; +import org.elasticsearch.xpack.profiling.action.TransportGetTopNFunctionsAction; +import org.elasticsearch.xpack.profiling.persistence.IndexStateResolver; +import org.elasticsearch.xpack.profiling.persistence.ProfilingDataStreamManager; +import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexManager; +import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexTemplateRegistry; +import org.elasticsearch.xpack.profiling.rest.RestGetFlamegraphAction; +import org.elasticsearch.xpack.profiling.rest.RestGetStackTracesAction; +import org.elasticsearch.xpack.profiling.rest.RestGetStatusAction; +import org.elasticsearch.xpack.profiling.rest.RestGetTopNFunctionsAction; import java.util.ArrayList; import java.util.Collection; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java index fcdc116cab725..398a004edd448 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ 
b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.core.UpdateForV9; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CloudProviders.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CloudProviders.java index 0245df13f8fad..de2feb727a029 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CloudProviders.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.Map; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java index 3db9b543bdb88..b8ee54f5f29e8 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.Map; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostEntry.java similarity index 95% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostEntry.java index b6795294e7f06..ded99eec428f2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostEntry.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.Map; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Frame.java similarity index 88% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Frame.java index b2a37b7cfa903..5bd2d82237fc3 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Frame.java @@ -5,6 +5,6 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; public record Frame(String fileName, String functionName, int functionOffset, int lineNumber, boolean inline, boolean last) {} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/FrameGroupID.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/FrameGroupID.java index 32273d56d0176..4674a2cb0e12f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/FrameGroupID.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.Strings; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java similarity index 92% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java index 3719722ad2d62..6866281c8dbeb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java index c851b372cb2db..e4ea3c1521d22 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesAction.java similarity index 92% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesAction.java index 1fd87740d6292..6871cc9e296f2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequest.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequest.java index 038a576cd77fc..be30c9662fddb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequest.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -19,6 +19,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.profiling.persistence.EventsIndex; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java index 4cad1104f783b..532ad374c3c4b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseBuilder.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseBuilder.java index 44c9c987fc6c7..1b31642d07be1 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseBuilder.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.time.Instant; import java.util.List; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java index 59132d45995e3..0d8f3aad27daa 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java similarity index 92% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java index b11e74cbbf93d..5d7dc17cd348e 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java index b8785bc607b18..b16ce6f43685f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java index aae6615114f43..29f3b66956d55 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/IndexAllocation.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/IndexAllocation.java index 7d1c5bdbf66a3..8b97f1139d6ad 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/IndexAllocation.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java index d694ffd2cbebc..5628b64ea67b7 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceTypeService.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceTypeService.java index 3a1cad38f7781..05367cc3fbaaf 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceTypeService.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/KvIndexResolver.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/KvIndexResolver.java index 53962c1f93cee..dbc60aa47a235 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/KvIndexResolver.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/NumberUtils.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/NumberUtils.java similarity index 95% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/NumberUtils.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/NumberUtils.java index d346dd279f250..f8093091f56c5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/NumberUtils.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/NumberUtils.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; final class NumberUtils { private NumberUtils() { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportAction.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportAction.java index 115b165f3e791..1a6809774f7f6 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.inject.Inject; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingLicenseChecker.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingLicenseChecker.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingLicenseChecker.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingLicenseChecker.java index 1100c6b10c5f7..a479dca379c4a 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingLicenseChecker.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingLicenseChecker.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseUtils; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingUsageTransportAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingUsageTransportAction.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingUsageTransportAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingUsageTransportAction.java index 7e7b431759cd4..738a7a4e52ddb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingUsageTransportAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingUsageTransportAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Resampler.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Resampler.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Resampler.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Resampler.java index b70807e472536..54401ce1d3a5a 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Resampler.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Resampler.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.Random; import java.util.random.RandomGenerator; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackFrame.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackFrame.java index 5f7102c63d3d7..b3b2b0b8caea5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackFrame.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackTrace.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackTrace.java index d24127824dafd..2a4e5f42fe657 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackTrace.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.ToXContentObject; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StopWatch.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StopWatch.java similarity index 94% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StopWatch.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StopWatch.java index c423fe12f3581..6197a0d6a0c4f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StopWatch.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StopWatch.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; /** * Measures time and logs it in milliseconds. diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java index 777d8e247335c..402d2ff012839 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TraceEvent.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TraceEvent.java index adb88848a418e..f020ad9e6a905 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TraceEvent.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.HashMap; import java.util.Map; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java index 7a25319d3a1cc..4f3778081563b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java index d7c9e61b73a3a..5467f0c10ccc8 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -48,6 +48,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ObjectPath; +import org.elasticsearch.xpack.profiling.ProfilingPlugin; +import org.elasticsearch.xpack.profiling.persistence.EventsIndex; import java.time.Duration; import java.time.Instant; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java similarity index 94% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java index d918a0def7ebb..88f19a62bbedf 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java @@ -5,7 +5,7 @@ * 2.0. 
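Taken together, the imports added in the hunks above (ProfilingPlugin from the root package; EventsIndex from the new persistence package) outline the structure this refactoring produces. A comment-form summary of the resulting layout, inferred from the rename targets in this diff (including the REST handlers further down):

// org.elasticsearch.xpack.profiling             -- ProfilingPlugin remains at the root
// org.elasticsearch.xpack.profiling.action      -- transport actions, requests/responses, and domain types
// org.elasticsearch.xpack.profiling.persistence -- EventsIndex, index/data-stream managers, template registry
// org.elasticsearch.xpack.profiling.rest        -- REST handlers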
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -34,6 +34,12 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.profiling.ProfilingPlugin; +import org.elasticsearch.xpack.profiling.persistence.EventsIndex; +import org.elasticsearch.xpack.profiling.persistence.IndexStateResolver; +import org.elasticsearch.xpack.profiling.persistence.ProfilingDataStreamManager; +import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexManager; +import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexTemplateRegistry; public class TransportGetStatusAction extends TransportMasterNodeAction<GetStatusAction.Request, GetStatusAction.Response> { private static final Logger log = LogManager.getLogger(TransportGetStatusAction.class); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java index cb5f2da6c3731..e5d67c0b005e2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -120,7 +120,7 @@ static GetTopNFunctionsResponse buildTopNFunctions(GetStackTracesResponse respon return builder.build(); } - private static class TopNFunctionsBuilder { + static class TopNFunctionsBuilder { private final Integer limit; private final HashMap<String, TopNFunction> topNFunctions; @@ -141,7 +141,7 @@ public GetTopNFunctionsResponse build() { sumTotalCount += topNFunction.getTotalCount(); } // limit at the end so global stats are independent of the limit - if (limit != null && limit > 0) { + if (limit != null && limit > 0 && limit < functions.size()) { functions = functions.subList(0, limit); } return new GetTopNFunctionsResponse(sumSelfCount, sumTotalCount, functions); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/AbstractProfilingPersistenceManager.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/AbstractProfilingPersistenceManager.java index d74eaa8c5650e..528d6f28a7115 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/AbstractProfilingPersistenceManager.java @@ -5,7 +5,7 @@ * 2.0.
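Beyond the package move, the TransportGetTopNFunctionsAction hunk above also changes behavior: build() now applies the limit only when it is smaller than the number of collected functions. List.subList(0, limit) throws IndexOutOfBoundsException when limit exceeds the list size, so the old guard (limit != null && limit > 0) could fail whenever a caller requested more functions than were available. A minimal standalone sketch of the guarded truncation (not the plugin class itself, just the rule it now follows):

import java.util.List;

final class TopNLimitSketch {
    // Mirrors the new condition: truncate only when the limit actually cuts the list.
    static <T> List<T> applyLimit(List<T> functions, Integer limit) {
        if (limit != null && limit > 0 && limit < functions.size()) {
            return functions.subList(0, limit); // safe: limit < size
        }
        return functions; // limit absent, non-positive, or >= size: return everything
    }

    public static void main(String[] args) {
        List<String> two = List.of("foo", "bar");
        System.out.println(applyLimit(two, 1)); // [foo]
        System.out.println(applyLimit(two, 5)); // [foo, bar]; subList(0, 5) would have thrown
    }
}

Note that the sums are still taken over every collected function before truncation, which keeps the global statistics independent of the limit (see the comment retained in the hunk).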
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/EventsIndex.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/EventsIndex.java index f246a34f3362d..b87f3345579aa 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/EventsIndex.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import java.util.Collection; import java.util.Collections; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexState.java similarity index 95% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexState.java index c34858acf5986..81262e6d33cad 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexState.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.index.Index; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStateResolver.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStateResolver.java index a09d162c32967..b5efe66423679 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStateResolver.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -21,12 +21,12 @@ import java.util.List; import java.util.Map; -class IndexStateResolver { +public class IndexStateResolver { private static final Logger logger = LogManager.getLogger(IndexStateResolver.class); private volatile boolean checkOutdatedIndices; - IndexStateResolver(boolean checkOutdatedIndices) { + public IndexStateResolver(boolean checkOutdatedIndices) { this.checkOutdatedIndices = checkOutdatedIndices; } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStatus.java similarity index 92% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStatus.java index 389c0de80cc5f..0dc3da7bc7f80 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStatus.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; enum IndexStatus { CLOSED(false), diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/Migration.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/Migration.java index b6ccc2cee91c9..138c2301fd636 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/Migration.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManager.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManager.java index 722a7d1dbac63..331d93b066da5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManager.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; @@ -36,7 +36,7 @@ /** * Creates all data streams that are required for using Elastic Universal Profiling. 
*/ -class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager<ProfilingDataStreamManager.ProfilingDataStream> { +public class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager<ProfilingDataStreamManager.ProfilingDataStream> { public static final List<ProfilingDataStream> PROFILING_DATASTREAMS; static { @@ -51,7 +51,12 @@ class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager<ProfilingDataStreamManager.ProfilingDataStream> { +public class ProfilingIndexManager extends AbstractProfilingPersistenceManager<ProfilingIndexManager.ProfilingIndex> { // For testing public static final List<ProfilingIndex> PROFILING_INDICES = List.of( ProfilingIndex.regular( @@ -68,7 +68,12 @@ class ProfilingIndexManager extends AbstractProfilingPersistenceManager<ProfilingIndexManager.ProfilingIndex> client.admin().indices().delete(req, l)); } - enum OnVersionBump { + public enum OnVersionBump { DELETE_OLD, KEEP_OLD } @@ -257,27 +262,27 @@ enum OnVersionBump { /** * An index that is used by Universal Profiling. */ - static class ProfilingIndex implements ProfilingIndexAbstraction { + public static class ProfilingIndex implements ProfilingIndexAbstraction { private final String namePrefix; private final int version; private final String generation; private final OnVersionBump onVersionBump; private final List<Migration> migrations; - public static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump) { + static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump) { return regular(name, version, onVersionBump, null); } - public static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump, Migration.Builder builder) { + static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump, Migration.Builder builder) { List<Migration> migrations = builder != null ? builder.build(version) : null; return new ProfilingIndex(name, version, null, onVersionBump, migrations); } - public static ProfilingIndex kv(String name, int version) { + static ProfilingIndex kv(String name, int version) { return kv(name, version, null); } - public static ProfilingIndex kv(String name, int version, Migration.Builder builder) { + static ProfilingIndex kv(String name, int version, Migration.Builder builder) { List<Migration> migrations = builder != null ? builder.build(version) : null; // K/V indices will age automatically as per the ILM policy, and we won't force-upgrade them on version bumps return new ProfilingIndex(name, version, "000001", OnVersionBump.KEEP_OLD, migrations); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java index e1698e71afab2..61d3010bddf77 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java @@ -5,7 +5,7 @@ * 2.0.
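The visibility changes above pair directly with the package split: IndexStateResolver, ProfilingDataStreamManager, ProfilingIndexManager, OnVersionBump and ProfilingIndex become public because their callers (for example TransportGetStatusAction in the action package) no longer share their package, while the ProfilingIndex.regular(...) and kv(...) factories drop to package-private since only persistence-package code constructs them now. A hypothetical two-file sketch of the Java access rule at work (com.example package and class names are illustrative, not the plugin's):

// file: com/example/persistence/Resolver.java
package com.example.persistence;

public class Resolver {               // a package-private "class Resolver" would break the caller below
    public Resolver(boolean checkOutdated) {}
}

// file: com/example/action/Caller.java
package com.example.action;

import com.example.persistence.Resolver;

class Caller {
    // Cross-package use requires both the type and the invoked constructor to be public.
    private final Resolver resolver = new Resolver(true);
}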
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetFlamegraphAction.java similarity index 90% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetFlamegraphAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetFlamegraphAction.java index 3b1b2e1789ad1..c6c9309077a34 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetFlamegraphAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; @@ -13,6 +13,8 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; +import org.elasticsearch.xpack.profiling.action.GetFlamegraphAction; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesAction.java similarity index 90% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStackTracesAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesAction.java index ac7e9943b6566..4161f478bc2f3 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; @@ -13,6 +13,8 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; +import org.elasticsearch.xpack.profiling.action.GetStackTracesAction; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java similarity index 93% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java index 331bab40cdacc..2d5cc7a71669c 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; @@ -13,6 +13,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.profiling.action.GetStatusAction; import java.util.List; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetTopNFunctionsAction.java similarity index 90% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetTopNFunctionsAction.java index b9896418d7b79..9c23d31964b5b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetTopNFunctionsAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; @@ -13,6 +13,8 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; +import org.elasticsearch.xpack.profiling.action.GetTopNFunctionsAction; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java index 48cc535dbe7e4..a7b9a97b71acc 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CarthesianCombinator.java similarity index 97% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CarthesianCombinator.java index 2982df317a38c..1b41f30c3df8e 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CarthesianCombinator.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.lang.reflect.Array; import java.util.function.Consumer; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java index b6e743a0946dd..eaf6cf618eddb 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/FrameGroupIDTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/FrameGroupIDTests.java index 50cfdd28a98fc..2bd6d66f82c54 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/FrameGroupIDTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequestTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequestTests.java index cfaa90b8adf85..70bb1abfc40ac 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequestTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java index 3ebd2ef6a8aeb..973f9ce3df820 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/HostMetadataTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/HostMetadataTests.java index 5c24e295909bc..b6b1ecef666c9 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/HostMetadataTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/IndexAllocationTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/IndexAllocationTests.java index bd66645243a92..756636ef84f78 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/IndexAllocationTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/KvIndexResolverTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/KvIndexResolverTests.java index d6b9438611114..5229a398b0367 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/KvIndexResolverTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/NumberUtilsTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/NumberUtilsTests.java similarity index 95% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/NumberUtilsTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/NumberUtilsTests.java index 0b8a410f9bb66..649759ba0309d 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/NumberUtilsTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/NumberUtilsTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportActionTests.java similarity index 97% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportActionTests.java index b66b8a3db50f9..d7eda19e45fbf 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ResamplerTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ResamplerTests.java index 0b37dcd154ca5..c2537edab6bbd 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ResamplerTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackFrameTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackFrameTests.java index 3e1bc4eba202d..0888133759f45 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackFrameTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackTraceTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackTraceTests.java index 4f583b55f18f7..ee85c4b9cb01f 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackTraceTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionTests.java index f30fd18443550..9623415b41554 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java new file mode 100644 index 0000000000000..26c0f066dd092 --- /dev/null +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling.action; + +import org.elasticsearch.test.ESTestCase; + +public class TopNFunctionsBuilderTests extends ESTestCase { + public void testBuildFunctions() { + TransportGetTopNFunctionsAction.TopNFunctionsBuilder builder = new TransportGetTopNFunctionsAction.TopNFunctionsBuilder(null); + TopNFunction foo = foo(); + TopNFunction bar = bar(); + builder.addTopNFunction(foo); + builder.addTopNFunction(bar); + + GetTopNFunctionsResponse response = builder.build(); + + assertEquals(7L, response.getSelfCount()); + assertEquals(14L, response.getTotalCount()); + assertEquals(2, response.getTopN().size()); + assertEquals(foo, response.getTopN().get(0)); + assertEquals(bar, response.getTopN().get(1)); + } + + public void testBuildFunctionsWithLimitSmallerThanAvailableFunctionCount() { + TransportGetTopNFunctionsAction.TopNFunctionsBuilder builder = new TransportGetTopNFunctionsAction.TopNFunctionsBuilder(1); + TopNFunction foo = foo(); + TopNFunction bar = bar(); + builder.addTopNFunction(foo); + builder.addTopNFunction(bar); + + GetTopNFunctionsResponse response = builder.build(); + + // total counts are independent of the limit + assertEquals(7L, response.getSelfCount()); + assertEquals(14L, response.getTotalCount()); + assertEquals(1, response.getTopN().size()); + assertEquals(foo, response.getTopN().get(0)); + } + + public void testBuildFunctionsWithLimitHigherThanAvailableFunctionCount() { + TransportGetTopNFunctionsAction.TopNFunctionsBuilder builder = new TransportGetTopNFunctionsAction.TopNFunctionsBuilder(5); + TopNFunction foo = foo(); + TopNFunction bar = bar(); + builder.addTopNFunction(foo); + builder.addTopNFunction(bar); + + GetTopNFunctionsResponse response = builder.build(); + + assertEquals(7L, response.getSelfCount()); + assertEquals(14L, response.getTotalCount()); + // still limited to the available two functions + assertEquals(2, response.getTopN().size()); + assertEquals(foo, response.getTopN().get(0)); + assertEquals(bar, response.getTopN().get(1)); + } + + private TopNFunction foo() { + TopNFunction foo = function("foo"); + foo.addSelfCount(5L); + foo.addTotalCount(10L); + foo.addSelfAnnualCO2Tons(1.0d); + foo.addTotalAnnualCO2Tons(2.0d); + foo.addSelfAnnualCostsUSD(32.2d); + foo.addTotalAnnualCostsUSD(64.4d); + return foo; + } + + private TopNFunction bar() { + TopNFunction bar = function("bar"); + bar.addSelfCount(2L); + bar.addTotalCount(4L); + bar.addSelfAnnualCO2Tons(0.5d); + bar.addTotalAnnualCO2Tons(1.0d); + bar.addSelfAnnualCostsUSD(16.0d); + bar.addTotalAnnualCostsUSD(32.0d); + return bar; + } + + private TopNFunction function(String name) { + return new TopNFunction(name, 3, false, 0, name, "main.c", 1, "demo"); + } +} diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphActionTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphActionTests.java index e10892f0e73ce..46d8df0a91bbd 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java +++ 
b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesActionTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesActionTests.java index 2eccfb45f5958..80962ac5064a5 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsActionTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsActionTests.java index f248d8e27bd43..6e5ed79579a0f 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/EventsIndexTests.java similarity index 97% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/EventsIndexTests.java index 4f943cbb62a7e..8de7c1c974785 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/EventsIndexTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManagerTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManagerTests.java index 87b8aed1811e2..f2245baafe0c0 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManagerTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexManagerTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexManagerTests.java index ae1aa7072510d..db3037e09763d 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexManagerTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistryTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistryTests.java index fb1051add3f1b..81d6ed15804b6 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistryTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/VerifyingClient.java similarity index 94% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/VerifyingClient.java index c37404c9209df..38a0c2fdf7e10 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/VerifyingClient.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -19,7 +19,7 @@ /** * A client that delegates to a verifying function for action/request/listener */ -public class VerifyingClient extends NoOpClient { +class VerifyingClient extends NoOpClient { private TriFunction, ActionRequest, ActionListener, ActionResponse> verifier = (a, r, l) -> { Assert.fail("verifier not set"); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesActionTests.java similarity index 87% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesActionTests.java index 695bd3be0ef79..d5cd50e65c019 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesActionTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; @@ -17,6 +17,8 @@ import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; +import org.elasticsearch.xpack.profiling.action.GetStackTracesResponse; import org.junit.Before; import java.util.Collections; @@ -76,22 +78,15 @@ public void testPrepareParameterizedRequest() { assertThat(getStackTracesRequest.getCustomCostPerCoreHour(), is(0.083d)); assertThat(getStackTracesRequest.getQuery(), notNullValue(QueryBuilder.class)); executeCalled.set(true); - - GetStackTracesResponseBuilder responseBuilder = new GetStackTracesResponseBuilder(getStackTracesRequest); - responseBuilder.setSamplingRate(0.04d); - responseBuilder.setTotalFrames(523); - responseBuilder.setTotalSamples(3L); - - GetStackTracesResponse response = responseBuilder.build(); - assertNull(response.getStackTraces()); - assertNull(response.getStackFrames()); - assertNull(response.getExecutables()); - assertNull(response.getStackTraceEvents()); - assertEquals(response.getSamplingRate(), 0.04d, 0.0001d); - assertEquals(response.getTotalFrames(), 523); - assertEquals(response.getTotalSamples(), 3L); - - return response; + return new GetStackTracesResponse( + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + 523, + 0.04d, + 3L + ); }); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) .withPath("/_profiling/stacktraces") diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 5023b5a4bf877..757fe411387d6 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -11,6 +11,7 @@ import java.util.List; import java.util.Locale; import java.util.function.Function; +import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; @@ -68,7 +69,15 @@ public Object parse(String line) { // read data String lower = line.toLowerCase(Locale.ROOT); if (lower.startsWith("warning:")) { + if (testCase.expectedWarningsRegex.isEmpty() == false) { + throw new IllegalArgumentException("Cannot mix warnings and regex warnings in CSV SPEC files: [" + line + "]"); + } testCase.expectedWarnings.add(line.substring("warning:".length()).trim()); + } else if (lower.startsWith("warningregex:")) { + if (testCase.expectedWarnings.isEmpty() == false) { + throw new IllegalArgumentException("Cannot mix warnings and regex warnings in CSV SPEC files: [" + line + "]"); + } + testCase.expectedWarningsRegex.add(Pattern.compile(".*" + line.substring("warningregex:".length()).trim() + ".*")); } else if (lower.startsWith("ignoreorder:")) { testCase.ignoreOrder = Boolean.parseBoolean(line.substring("ignoreOrder:".length()).trim()); } else if (line.startsWith(";")) { @@ -93,6 +102,7 @@ public static class CsvTestCase { public String earlySchema; public String expectedResults; private final List expectedWarnings = new ArrayList<>(); + private final List expectedWarningsRegex = new 
ArrayList<>(); public boolean ignoreOrder; public List requiredFeatures = List.of(); @@ -137,6 +147,10 @@ public List expectedWarnings(boolean forEmulated) { public void adjustExpectedWarnings(Function updater) { expectedWarnings.replaceAll(updater::apply); } + + public List expectedWarningsRegex() { + return expectedWarningsRegex; + } } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java index 0cf6cb93c865b..207df0faddd07 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java @@ -449,9 +449,9 @@ public Map getRepositories( (metadata) -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { if (enabled.get()) { - super.assertSnapshotOrGenericThread(); + super.assertSnapshotOrStatelessPermittedThreadPool(); } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java index 6800dea01863a..4a15d00bc8168 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java @@ -248,7 +248,7 @@ public Map getRepositories( "test-fs", (metadata) -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // ignore } } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java index 9c36d7b762871..c54ead2bdbc45 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java @@ -609,7 +609,7 @@ private void testDirectories( ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git 
a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index 8871dd773375e..4a667ed629c63 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -45,8 +45,8 @@ import static org.elasticsearch.test.SecuritySettingsSourceField.ES_TEST_ROOT_ROLE; import static org.elasticsearch.test.SecuritySettingsSourceField.ES_TEST_ROOT_ROLE_DESCRIPTOR; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; import static org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField.RUN_AS_USER_HEADER; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java index 881d1340ebc3f..667b513555594 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java @@ -11,9 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -27,7 +25,7 @@ import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; -public abstract class TransportGrantAction extends HandledTransportAction< +public abstract class TransportGrantAction extends TransportAction< Request, Response> { @@ -39,12 +37,11 @@ public TransportGrantAction( String actionName, TransportService transportService, ActionFilters actionFilters, - Writeable.Reader requestReader, AuthenticationService authenticationService, AuthorizationService authorizationService, ThreadContext threadContext ) { - super(actionName, transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(actionName, actionFilters, transportService.getTaskManager()); this.authenticationService = authenticationService; this.authorizationService = authorizationService; this.threadContext = threadContext; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java index 9d367bf5caf24..33b1e44004454 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java @@ -10,9 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -24,7 +22,7 @@ import java.util.Map; public abstract class TransportBaseUpdateApiKeyAction extends - HandledTransportAction { + TransportAction { private final SecurityContext securityContext; @@ -32,10 +30,9 @@ protected TransportBaseUpdateApiKeyAction( final String actionName, final TransportService transportService, final ActionFilters actionFilters, - final Writeable.Reader requestReader, final SecurityContext context ) { - super(actionName, transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(actionName, actionFilters, transportService.getTaskManager()); this.securityContext = context; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java index cb8f6c861ecf7..3b978c3e44b4c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java @@ -38,7 +38,7 @@ public TransportBulkUpdateApiKeyAction( final CompositeRolesStore rolesStore, final NamedXContentRegistry xContentRegistry ) { - super(BulkUpdateApiKeyAction.NAME, transportService, actionFilters, BulkUpdateApiKeyRequest::new, context); + super(BulkUpdateApiKeyAction.NAME, transportService, actionFilters, context); this.apiKeyService = apiKeyService; this.resolver = new ApiKeyUserRoleDescriptorResolver(rolesStore, xContentRegistry); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java index 568e0fe5eb075..268afc7f0b32f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java @@ -9,9 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -28,7 +27,7 @@ /** * Implementation of the action 
needed to create an API key */ -public final class TransportCreateApiKeyAction extends HandledTransportAction { +public final class TransportCreateApiKeyAction extends TransportAction { private final ApiKeyService apiKeyService; private final ApiKeyUserRoleDescriptorResolver resolver; @@ -43,7 +42,7 @@ public TransportCreateApiKeyAction( CompositeRolesStore rolesStore, NamedXContentRegistry xContentRegistry ) { - super(CreateApiKeyAction.NAME, transportService, actionFilters, CreateApiKeyRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(CreateApiKeyAction.NAME, actionFilters, transportService.getTaskManager()); this.apiKeyService = apiKeyService; this.resolver = new ApiKeyUserRoleDescriptorResolver(rolesStore, xContentRegistry); this.securityContext = context; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java index 267a3aafe0c72..eeccd4b833a23 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java @@ -9,9 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -26,9 +25,7 @@ /** * Implementation of the action needed to create an API key */ -public final class TransportCreateCrossClusterApiKeyAction extends HandledTransportAction< - CreateCrossClusterApiKeyRequest, - CreateApiKeyResponse> { +public final class TransportCreateCrossClusterApiKeyAction extends TransportAction { private final ApiKeyService apiKeyService; private final SecurityContext securityContext; @@ -40,13 +37,7 @@ public TransportCreateCrossClusterApiKeyAction( ApiKeyService apiKeyService, SecurityContext context ) { - super( - CreateCrossClusterApiKeyAction.NAME, - transportService, - actionFilters, - CreateCrossClusterApiKeyRequest::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); + super(CreateCrossClusterApiKeyAction.NAME, actionFilters, transportService.getTaskManager()); this.apiKeyService = apiKeyService; this.securityContext = context; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java index a6401053634b2..54e073906b815 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java @@ -65,15 +65,7 @@ public TransportGrantApiKeyAction( ApiKeyService apiKeyService, ApiKeyUserRoleDescriptorResolver resolver ) { - super( - GrantApiKeyAction.NAME, - transportService, - actionFilters, - GrantApiKeyRequest::new, - authenticationService, - authorizationService, - 
threadContext - ); + super(GrantApiKeyAction.NAME, transportService, actionFilters, authenticationService, authorizationService, threadContext); this.apiKeyService = apiKeyService; this.resolver = resolver; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java index 2427b571cf575..b6e0854d6c443 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java @@ -37,7 +37,7 @@ public TransportUpdateApiKeyAction( final CompositeRolesStore rolesStore, final NamedXContentRegistry xContentRegistry ) { - super(UpdateApiKeyAction.NAME, transportService, actionFilters, UpdateApiKeyRequest::new, context); + super(UpdateApiKeyAction.NAME, transportService, actionFilters, context); this.apiKeyService = apiKeyService; this.resolver = new ApiKeyUserRoleDescriptorResolver(rolesStore, xContentRegistry); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java index a47bbb0301ebc..f4578bf7a737c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java @@ -37,7 +37,7 @@ public TransportUpdateCrossClusterApiKeyAction( final ApiKeyService apiKeyService, final SecurityContext context ) { - super(UpdateCrossClusterApiKeyAction.NAME, transportService, actionFilters, UpdateCrossClusterApiKeyRequest::new, context); + super(UpdateCrossClusterApiKeyAction.NAME, transportService, actionFilters, context); this.apiKeyService = apiKeyService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java index d7241011d9c09..4d76205d29021 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java @@ -39,7 +39,6 @@ public TransportActivateProfileAction( ActivateProfileAction.NAME, transportService, actionFilters, - ActivateProfileRequest::new, authenticationService, authorizationService, threadPool.getThreadContext() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java index f94acab50b6b5..9c1419f67bcf0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java @@ -23,7 
+23,7 @@ import java.io.IOException; import java.util.Set; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.same; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java index 70190b70f3f1a..1525b9157a610 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java @@ -33,7 +33,7 @@ import java.util.Set; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index d86f99c19ffa7..5870aa99e2dbc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -158,7 +158,6 @@ import static org.elasticsearch.test.SecurityIntegTestCase.getFastStoredHashAlgoForTests; import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_ID_KEY; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_METADATA_KEY; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_TYPE_KEY; @@ -200,6 +199,29 @@ public class ApiKeyServiceTests extends ESTestCase { + private static final List ACCESS_CANDIDATES = List.of(""" + { + "search": [ {"names": ["logs"]} ] + }""", """ + { + "search": [ {"names": ["logs"], "query": "abc" } ] + }""", """ + { + "search": [ {"names": ["logs"], "field_security": {"grant": ["*"], "except": ["private"]} } ] + }""", """ + { + "search": [ {"names": ["logs"], "query": "abc", "field_security": {"grant": ["*"], "except": ["private"]} } ] + }""", """ + { + "replication": [ {"names": ["archive"], "allow_restricted_indices": true } ] + }""", """ + { + "replication": [ {"names": ["archive"]} ] + }""", """ + { + "search": [ {"names": ["logs"]} ], + "replication": [ {"names": ["archive"]} ] + }"""); private ThreadPool threadPool; 
private Client client; private SecurityIndexManager securityIndex; @@ -2847,6 +2869,10 @@ private static RoleDescriptor randomRoleDescriptorWithWorkflowsRestriction() { ); } + public static String randomCrossClusterApiKeyAccessField() { + return randomFrom(ACCESS_CANDIDATES); + } + public static class Utils { private static final AuthenticationContextSerializer authenticationContextSerializer = new AuthenticationContextSerializer(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java index f9fa9269c4ef1..ddeffc0675498 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java @@ -30,7 +30,7 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; diff --git a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java index 486dd7c581032..98cf817d6c018 100644 --- a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java +++ b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java @@ -122,6 +122,6 @@ protected void logAudits() throws Exception { protected void refreshIndex(String index) throws IOException { Request refreshRequest = new Request("POST", index + "/_refresh"); - assertOK(adminClient().performRequest(refreshRequest)); + assertOKAndConsume(adminClient().performRequest(refreshRequest)); } } diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java index 600ceb3cd8202..4d9a9e7705052 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java @@ -188,7 +188,7 @@ private void testChainedTransforms(final int numTransforms) throws Exception { assertFalse(aliasExists(destWriteAlias)); String transformConfig = createTransformConfig(sourceIndex, destIndex, destReadAlias, destWriteAlias); - assertAcknowledged(putTransform(transformId, transformConfig, true, RequestOptions.DEFAULT)); + putTransform(transformId, transformConfig, true, RequestOptions.DEFAULT); } List transformIdsShuffled = new ArrayList<>(transformIds); diff --git 
a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index e7d54028caa20..4db0d0d8baaf1 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -241,38 +241,39 @@ public void testTransformLifecycleInALoop() throws Exception { long sleepAfterStartMillis = randomLongBetween(0, 5_000); boolean force = randomBoolean(); try { - // Create the continuous transform + // Create the continuous transform. putTransform(transformId, config, RequestOptions.DEFAULT); assertThat(getTransformTasks(), is(empty())); assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId, RequestOptions.DEFAULT); - // There is 1 transform task after start + // There is 1 transform task after start. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); Thread.sleep(sleepAfterStartMillis); - // There should still be 1 transform task as the transform is continuous + // There should still be 1 transform task as the transform is continuous. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); - // Stop the transform with force set randomly + // Stop the transform with force set randomly. stopTransform(transformId, true, null, false, force); - // After the transform is stopped, there should be no transform task left - assertThat(getTransformTasks(), is(empty())); + if (force) { + // If the "force" has been used, then the persistent task is removed from the cluster state but the local task can still + // be seen by the PersistentTasksNodeService. We need to wait until PersistentTasksNodeService reconciles the state. + assertBusy(() -> assertThat(getTransformTasks(), is(empty()))); + } else { + // If the "force" hasn't been used then we can expect the local task to be already gone. + assertThat(getTransformTasks(), is(empty())); + } + // After the transform is stopped, there should be no transform task left in the cluster state. 
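+            // (Unlike the node-local task list above, the cluster-state check below needs
+            // no retry: force-stop removes the persistent task from the cluster state
+            // directly, so only the local view seen by PersistentTasksNodeService can lag.)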
assertThat(getTransformTasksFromClusterState(transformId), is(empty())); // Delete the transform deleteTransform(transformId); } catch (AssertionError | Exception e) { throw new AssertionError( - format( - "Failure at iteration %d (sleepAfterStartMillis=%s,force=%s): %s", - i, - sleepAfterStartMillis, - force, - e.getMessage() - ), + format("Failure at iteration %d (sleepAfterStart=%sms,force=%s): %s", i, sleepAfterStartMillis, force, e.getMessage()), e ); } diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index eb1a1258d5a96..4b7e478dbb61d 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -10,6 +10,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -65,9 +66,6 @@ public abstract class TransformRestTestCase extends TransformCommonRestTestCase { - protected static final String AUTH_KEY = "Authorization"; - protected static final String SECONDARY_AUTH_KEY = "es-secondary-authorization"; - private final Set createdTransformIds = new HashSet<>(); protected void cleanUp() throws Exception { @@ -171,15 +169,15 @@ protected void deleteTransform(String id, boolean force) throws IOException { if (force) { request.addParameter(TransformField.FORCE.getPreferredName(), "true"); } - assertOK(adminClient().performRequest(request)); + assertAcknowledged(adminClient().performRequest(request)); createdTransformIds.remove(id); } - protected Response putTransform(String id, String config, RequestOptions options) throws IOException { - return putTransform(id, config, false, options); + protected void putTransform(String id, String config, RequestOptions options) throws IOException { + putTransform(id, config, false, options); } - protected Response putTransform(String id, String config, boolean deferValidation, RequestOptions options) throws IOException { + protected void putTransform(String id, String config, boolean deferValidation, RequestOptions options) throws IOException { if (createdTransformIds.contains(id)) { throw new IllegalArgumentException("transform [" + id + "] is already registered"); } @@ -190,9 +188,8 @@ protected Response putTransform(String id, String config, boolean deferValidatio request.addParameter("defer_validation", "true"); } request.setOptions(options); - Response response = assertOK(client().performRequest(request)); + assertAcknowledged(client().performRequest(request)); createdTransformIds.add(id); - return response; } protected Map previewTransform(String transformConfig, RequestOptions options) throws IOException { @@ -217,8 +214,7 @@ protected Map getBasicTransformStats(String id) throws IOExcepti var request = new Request("GET", TRANSFORM_ENDPOINT + id + "/_stats"); request.addParameter(BASIC_STATS.getPreferredName(), "true"); request.setOptions(RequestOptions.DEFAULT); - Response response = client().performRequest(request); - 
List> stats = (List>) XContentMapValues.extractValue("transforms", entityAsMap(response)); + var stats = (List>) XContentMapValues.extractValue("transforms", entityAsMap(client().performRequest(request))); assertThat(stats, hasSize(1)); return stats.get(0); } @@ -229,11 +225,10 @@ protected String getTransformState(String id) throws IOException { @SuppressWarnings("unchecked") protected Map getTransform(String id) throws IOException { - Request request = new Request("GET", TRANSFORM_ENDPOINT + id); - Response response = client().performRequest(request); - List> transformConfigs = (List>) XContentMapValues.extractValue( + var request = new Request("GET", TRANSFORM_ENDPOINT + id); + var transformConfigs = (List>) XContentMapValues.extractValue( "transforms", - entityAsMap(response) + entityAsMap(client().performRequest(request)) ); assertThat(transformConfigs, hasSize(1)); return transformConfigs.get(0); @@ -260,14 +255,6 @@ protected long getCheckpoint(Map stats) { return ((Integer) XContentMapValues.extractValue("checkpointing.last.checkpoint", stats)).longValue(); } - protected DateHistogramGroupSource createDateHistogramGroupSourceWithFixedInterval( - String field, - DateHistogramInterval interval, - ZoneId zone - ) { - return new DateHistogramGroupSource(field, null, false, new DateHistogramGroupSource.FixedInterval(interval), zone, null); - } - protected DateHistogramGroupSource createDateHistogramGroupSourceWithCalendarInterval( String field, DateHistogramInterval interval, @@ -360,7 +347,7 @@ protected TransformConfig.Builder createTransformConfigBuilder( String destinationIndex, QueryConfig queryConfig, String... sourceIndices - ) throws Exception { + ) { return TransformConfig.builder() .setId(id) .setSource(new SourceConfig(sourceIndices, queryConfig, Collections.emptyMap())) @@ -380,7 +367,7 @@ protected void updateConfig(String id, String update, boolean deferValidation, R } updateRequest.setJsonEntity(update); updateRequest.setOptions(options); - assertOK(client().performRequest(updateRequest)); + assertOKAndConsume(client().performRequest(updateRequest)); } protected void createReviewsIndex( @@ -450,7 +437,7 @@ protected void createReviewsIndex( Request req = new Request("PUT", indexName); req.setEntity(indexMappings); req.setOptions(RequestOptions.DEFAULT); - assertOK(adminClient().performRequest(req)); + assertAcknowledged(adminClient().performRequest(req)); } // create index @@ -492,9 +479,12 @@ protected void doBulk(String bulkDocuments, boolean refresh) throws IOException bulkRequest.setJsonEntity(bulkDocuments); bulkRequest.setOptions(RequestOptions.DEFAULT); Response bulkResponse = adminClient().performRequest(bulkRequest); - assertOK(bulkResponse); - var bulkMap = entityAsMap(bulkResponse); - assertThat((boolean) bulkMap.get("errors"), is(equalTo(false))); + try { + var bulkMap = entityAsMap(assertOK(bulkResponse)); + assertThat((boolean) bulkMap.get("errors"), is(equalTo(false))); + } finally { + EntityUtils.consumeQuietly(bulkResponse.getEntity()); + } } protected Map matchAllSearch(String index, int size, RequestOptions options) throws IOException { @@ -502,8 +492,11 @@ protected Map matchAllSearch(String index, int size, RequestOpti request.addParameter("size", Integer.toString(size)); request.setOptions(options); Response response = client().performRequest(request); - assertOK(response); - return entityAsMap(response); + try { + return entityAsMap(assertOK(response)); + } finally { + EntityUtils.consumeQuietly(response.getEntity()); + } } private void 
waitForPendingTasks() { @@ -518,7 +511,7 @@ private void waitForPendingTasks() { ); request.addParameters(parameters); try { - adminClient().performRequest(request); + EntityUtils.consumeQuietly(adminClient().performRequest(request).getEntity()); } catch (Exception e) { throw new AssertionError("Failed to wait for pending tasks to complete", e); } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java index 0f807fbae45d1..4b7c42968f557 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.core.Strings; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; @@ -19,6 +18,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.core.Strings.format; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -86,10 +86,10 @@ public void testTaskRemovalAfterInternalIndexGotDeleted() throws Exception { deleteTransform(transformId); } - public void testCreateAndDeleteTransformInALoop() throws IOException { + public void testBatchTransformLifecycleInALoop() throws IOException { createReviewsIndex(); - String transformId = "test_create_and_delete_in_a_loop"; + String transformId = "test_batch_lifecycle_in_a_loop"; String destIndex = transformId + "-dest"; for (int i = 0; i < 100; ++i) { try { @@ -108,7 +108,48 @@ public void testCreateAndDeleteTransformInALoop() throws IOException { // Delete the transform deleteTransform(transformId); } catch (AssertionError | Exception e) { - fail("Failure at iteration " + i + ": " + e.getMessage()); + throw new AssertionError(format("Failure at iteration %d: %s", i, e.getMessage()), e); + } + } + } + + public void testContinuousTransformLifecycleInALoop() throws Exception { + createReviewsIndex(); + + String transformId = "test_cont_lifecycle_in_a_loop"; + String destIndex = transformId + "-dest"; + for (int i = 0; i < 100; ++i) { + long sleepAfterStartMillis = randomLongBetween(0, 5_000); + boolean force = randomBoolean(); + try { + // Create the continuous transform. + createContinuousPivotReviewsTransform(transformId, destIndex, null); + assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + + startTransform(transformId); + // There is 1 transform task after start. + assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + + Thread.sleep(sleepAfterStartMillis); + // There should still be 1 transform task as the transform is continuous. + assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + + // Stop the transform with force set randomly.
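+                // force==true exercises the direct removal path in TransportStopTransformAction:
+                // the persistent tasks are removed from the cluster state without fanning out
+                // to the individual tasks (taskOperation is not called). force==false goes
+                // through the regular per-task stop before the tasks are removed.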
+ stopTransform(transformId, force); + // After the transform is stopped, there should be no transform task left. + assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + + // Delete the transform. + deleteTransform(transformId); + } catch (AssertionError | Exception e) { + throw new AssertionError( + format("Failure at iteration %d (sleepAfterStart=%sms,force=%s): %s", i, sleepAfterStartMillis, force, e.getMessage()), + e + ); } } } @@ -168,7 +209,7 @@ private void beEvilAndDeleteTheTransformIndex() throws IOException { } private static String createConfig(String sourceIndex, String destIndex) { - return Strings.format(""" + return format(""" { "source": { "index": "%s" diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java index bccd97f22b4a1..5ab65ca023506 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java @@ -65,6 +65,7 @@ public void testForceStopFailedTransform() throws Exception { createContinuousPivotReviewsTransform(transformId, transformIndex, null); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); @@ -78,6 +79,7 @@ public void testForceStopFailedTransform() throws Exception { assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); // verify that we cannot stop a failed transform ResponseException ex = expectThrows(ResponseException.class, () -> stopTransform(transformId, false)); @@ -99,6 +101,7 @@ public void testForceStopFailedTransform() throws Exception { assertThat(XContentMapValues.extractValue("reason", fullState), is(nullValue())); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); } public void testForceResetFailedTransform() throws Exception { @@ -109,6 +112,7 @@ public void testForceResetFailedTransform() throws Exception { createContinuousPivotReviewsTransform(transformId, transformIndex, null); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); @@ -122,6 +126,7 @@ public void testForceResetFailedTransform() throws Exception { assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); // verify that we cannot reset a failed transform ResponseException ex = expectThrows(ResponseException.class, () -> resetTransform(transformId, false)); @@ -135,6 +140,7 @@ public void testForceResetFailedTransform() throws Exception { resetTransform(transformId, true); assertThat(getTransformTasks(), is(empty())); + 
assertThat(getTransformTasksFromClusterState(transformId), is(empty())); } public void testStartFailedTransform() throws Exception { @@ -145,6 +151,7 @@ public void testStartFailedTransform() throws Exception { createContinuousPivotReviewsTransform(transformId, transformIndex, null); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); @@ -158,6 +165,7 @@ public void testStartFailedTransform() throws Exception { assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); var expectedFailure = "Unable to start transform [test-force-start-failed-transform] " + "as it is in a failed state. Use force stop and then restart the transform once error is resolved. More details: [" @@ -172,6 +180,7 @@ public void testStartFailedTransform() throws Exception { stopTransform(transformId, true); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); } private void awaitState(String transformId, TransformStats.State state) throws Exception { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java index e2f66fe914bc2..970403e49c5a3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java @@ -11,7 +11,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -97,7 +97,7 @@ private static void createAuditAliasForDataFrameBWC(ClusterState state, Client c client.threadPool().getThreadContext(), TRANSFORM_ORIGIN, request, - ActionListener.wrap(r -> finalListener.onResponse(r.isAcknowledged()), finalListener::onFailure), + ActionListener.wrap(r -> finalListener.onResponse(r.isAcknowledged()), finalListener::onFailure), client.admin().indices()::aliases ); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java index b8ea1fee6e886..1996012ccdf58 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java @@ -164,18 +164,23 @@ protected void doExecute(Task task, Request request, ActionListener li state ); - final ActionListener doExecuteListener; - if (transformNodeAssignments.getWaitingForAssignment().size() > 0) { - doExecuteListener = cancelTransformTasksWithNoAssignment(finalListener, transformNodeAssignments); - } else { - 
doExecuteListener = finalListener; - } + final ActionListener doExecuteListener = cancelTransformTasksListener( + transformNodeAssignments.getWaitingForAssignment(), + finalListener + ); - if (transformNodeAssignments.getExecutorNodes().size() > 0) { + if (request.isForce()) { + // When force==true, we **do not** fan out to individual tasks (i.e. taskOperation method will not be called) as we + // want to make sure that the persistent tasks will be removed from cluster state even if these tasks are no longer + // visible by the PersistentTasksService. + cancelTransformTasksListener(transformNodeAssignments.getAssigned(), doExecuteListener).onResponse( + new Response(true) + ); + } else if (transformNodeAssignments.getExecutorNodes().isEmpty()) { + doExecuteListener.onResponse(new Response(true)); + } else { request.setNodes(transformNodeAssignments.getExecutorNodes().toArray(new String[0])); super.doExecute(task, request, doExecuteListener); - } else { - doExecuteListener.onResponse(new Response(true)); } }, e -> { if (e instanceof ResourceNotFoundException) { @@ -189,13 +194,10 @@ protected void doExecute(Task task, Request request, ActionListener li listener.onFailure(e); // found transforms without a config } else if (request.isForce()) { - final ActionListener doExecuteListener; - - if (transformNodeAssignments.getWaitingForAssignment().size() > 0) { - doExecuteListener = cancelTransformTasksWithNoAssignment(finalListener, transformNodeAssignments); - } else { - doExecuteListener = finalListener; - } + final ActionListener doExecuteListener = cancelTransformTasksListener( + transformNodeAssignments.getWaitingForAssignment(), + finalListener + ); if (transformNodeAssignments.getExecutorNodes().size() > 0) { request.setExpandedIds(transformNodeAssignments.getAssigned()); @@ -235,7 +237,6 @@ protected void taskOperation( TransformTask transformTask, ActionListener listener ) { - Set ids = request.getExpandedIds(); if (ids == null) { listener.onFailure(new IllegalStateException("Request does not have expandedIds set")); @@ -243,20 +244,6 @@ protected void taskOperation( } if (ids.contains(transformTask.getTransformId())) { - if (request.isForce()) { - // If force==true, we skip the additional step (setShouldStopAtCheckpoint) and move directly to shutting down the task. - // This way we ensure that the persistent task is removed ASAP (as opposed to being removed in one of the listeners). - try { - // Here the task is deregistered in scheduler and marked as completed in persistent task service. - transformTask.shutdown(); - // Here the indexer is aborted so that its thread finishes work ASAP. 
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java
index 7b61f0c9e8335..46f893a90aba1 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java
@@ -65,4 +65,18 @@ public Set<String> getWaitingForAssignment() {
     public Set<String> getStopped() {
         return stopped;
     }
+
+    @Override
+    public String toString() {
+        return new StringBuilder("TransformNodeAssignments[").append("executorNodes=")
+            .append(executorNodes)
+            .append(",assigned=")
+            .append(assigned)
+            .append(",waitingForAssignment=")
+            .append(waitingForAssignment)
+            .append(",stopped=")
+            .append(stopped)
+            .append("]")
+            .toString();
+    }
 }
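The new `toString` makes `TransformNodeAssignments` printable, which is mainly useful for debug logging while the stop action decides which code path to take (the logging call below is illustrative, not part of this change). A compact sketch of the same format on a stand-in record:

```java
import java.util.Set;

// Tiny stand-in showing the toString format added above; the field values are illustrative.
public class AssignmentsToStringDemo {

    record Assignments(Set<String> executorNodes, Set<String> assigned, Set<String> waitingForAssignment, Set<String> stopped) {
        @Override
        public String toString() {
            return new StringBuilder("TransformNodeAssignments[").append("executorNodes=")
                .append(executorNodes)
                .append(",assigned=")
                .append(assigned)
                .append(",waitingForAssignment=")
                .append(waitingForAssignment)
                .append(",stopped=")
                .append(stopped)
                .append("]")
                .toString();
        }
    }

    public static void main(String[] args) {
        var assignments = new Assignments(Set.of("executor-1"), Set.of("assigned-1"), Set.of("waiting-1"), Set.of("stopped-1"));
        // e.g. logger.debug("stopping transforms, assignments: {}", assignments);
        System.out.println(assignments);
    }
}
```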
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java
index 17548fd8d427f..33b20d5513bc5 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java
@@ -22,7 +22,7 @@ class TransformRetryableStartUpListener<Response> implements TransformScheduler.Listener {
     private final Supplier<Boolean> shouldRetry;
     private final TransformContext context;
     private final AtomicBoolean isFirstRun;
-    private final AtomicBoolean isRunning;
+    private final AtomicBoolean shouldRunAction;
 
     /**
      * @param transformId the transform associated with this listener. All events to this listener must be for the same transformId.
@@ -53,30 +53,28 @@ class TransformRetryableStartUpListener<Response> implements TransformScheduler.Listener {
         this.shouldRetry = shouldRetry;
         this.context = context;
         this.isFirstRun = new AtomicBoolean(true);
-        this.isRunning = new AtomicBoolean(true);
+        this.shouldRunAction = new AtomicBoolean(true);
     }
 
     @Override
     public void triggered(TransformScheduler.Event event) {
-        if (isRunning.get() && transformId.equals(event.transformId())) {
+        if (transformId.equals(event.transformId()) && shouldRunAction.compareAndSet(true, false)) {
             action.accept(ActionListener.wrap(this::actionSucceeded, this::actionFailed));
         }
     }
 
-    private void markDone() {
-        if (isRunning.compareAndSet(true, false)) {
-            synchronized (context) {
-                context.resetStartUpFailureCount();
-            }
-        }
-    }
-
     private void actionSucceeded(Response r) {
         maybeNotifyRetryListener(false);
         markDone();
         actionListener.onResponse(r);
     }
 
+    private void markDone() {
+        synchronized (context) {
+            context.resetStartUpFailureCount();
+        }
+    }
+
     private void maybeNotifyRetryListener(boolean response) {
         if (isFirstRun.compareAndSet(true, false)) {
             retryScheduledListener.onResponse(response);
@@ -87,6 +85,7 @@ private void actionFailed(Exception e) {
         if (shouldRetry.get()) {
             maybeNotifyRetryListener(true);
             recordError(e);
+            shouldRunAction.set(true);
         } else {
             maybeNotifyRetryListener(false);
             markDone();
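The core fix: `triggered(...)` now claims the run with `compareAndSet(true, false)`, so overlapping scheduler events cannot start the action twice, and a retryable failure re-arms the flag for the next event. A self-contained sketch of that single-flight-with-retry pattern:

```java
import java.util.concurrent.atomic.AtomicBoolean;

// Sketch of the single-flight pattern above: compareAndSet(true, false) lets exactly one
// trigger start the action; a retryable failure re-arms the flag so the next trigger runs again.
public class SingleFlightTrigger {

    private final AtomicBoolean shouldRunAction = new AtomicBoolean(true);

    void triggered(boolean actionWillFail) {
        // only one caller can flip true -> false; concurrent or repeated triggers are no-ops
        if (shouldRunAction.compareAndSet(true, false) == false) {
            System.out.println("skipping: no run pending (in flight or already done)");
            return;
        }
        System.out.println("running action");
        if (actionWillFail) {
            shouldRunAction.set(true); // re-arm so the next scheduler event retries
            System.out.println("action failed, re-armed for retry");
        }
    }

    public static void main(String[] args) {
        var trigger = new SingleFlightTrigger();
        trigger.triggered(true);  // runs, fails, re-arms
        trigger.triggered(false); // runs and succeeds
        trigger.triggered(false); // skipped: the flag stays false after success
    }
}
```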
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java
index f5c0b6046fbfe..2643d1bba652d 100644
--- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java
@@ -9,8 +9,6 @@
 
 import org.elasticsearch.test.ESTestCase;
 
-import java.util.Arrays;
-import java.util.HashSet;
 import java.util.Set;
 
 import static org.hamcrest.Matchers.equalTo;
@@ -19,10 +17,11 @@ public class TransformNodeAssignmentsTests extends ESTestCase {
 
     public void testConstructorAndGetters() {
-        Set<String> executorNodes = new HashSet<>(Arrays.asList("executor-1", "executor-2"));
-        Set<String> assigned = new HashSet<>(Arrays.asList("assigned-1", "assigned-2"));
-        Set<String> waitingForAssignment = new HashSet<>(Arrays.asList("waiting-1", "waitingv-2"));
-        Set<String> stopped = new HashSet<>(Arrays.asList("stopped-1", "stopped-2"));
+        Set<String> executorNodes = Set.of("executor-1", "executor-2");
+        Set<String> assigned = Set.of("assigned-1", "assigned-2");
+        Set<String> waitingForAssignment = Set.of("waiting-1", "waiting-2");
+        Set<String> stopped = Set.of("stopped-1", "stopped-2");
+
         TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped);
 
         assertThat(assignments.getExecutorNodes(), is(equalTo(executorNodes)));
@@ -30,4 +29,45 @@ public void testConstructorAndGetters() {
         assertThat(assignments.getWaitingForAssignment(), is(equalTo(waitingForAssignment)));
         assertThat(assignments.getStopped(), is(equalTo(stopped)));
     }
+
+    public void testToString() {
+        Set<String> executorNodes = Set.of("executor-1");
+        Set<String> assigned = Set.of("assigned-1");
+        Set<String> waitingForAssignment = Set.of("waiting-1");
+        Set<String> stopped = Set.of("stopped-1");
+
+        TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped);
+
+        assertThat(
+            assignments.toString(),
+            is(
+                equalTo(
+                    "TransformNodeAssignments["
+                        + "executorNodes=[executor-1],"
+                        + "assigned=[assigned-1],"
+                        + "waitingForAssignment=[waiting-1],"
+                        + "stopped=[stopped-1]"
+                        + "]"
+                )
+            )
+        );
+    }
+
+    public void testToString_EmptyCollections() {
+        Set<String> executorNodes = Set.of();
+        Set<String> assigned = Set.of();
+        Set<String> waitingForAssignment = Set.of();
+        Set<String> stopped = Set.of();
+
+        TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped);
+
+        assertThat(
+            assignments.toString(),
+            is(
+                equalTo(
+                    "TransformNodeAssignments[" + "executorNodes=[]," + "assigned=[]," + "waitingForAssignment=[]," + "stopped=[]" + "]"
+                )
+            )
+        );
+    }
 }
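A side note on the fixture cleanup above: `Set.of(...)` returns an immutable set and fails fast on duplicates and nulls, which is exactly what fixed test data wants. For example:

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetOfDemo {
    public static void main(String[] args) {
        // Old style: mutable, accepts later modification.
        Set<String> mutable = new HashSet<>(Arrays.asList("executor-1", "executor-2"));
        mutable.add("executor-3"); // allowed

        // New style: compact and immutable; duplicates or nulls throw at creation time.
        Set<String> fixed = Set.of("executor-1", "executor-2");
        try {
            fixed.add("executor-3");
        } catch (UnsupportedOperationException e) {
            System.out.println("Set.of(...) is immutable");
        }
    }
}
```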
"waitingForAssignment=[waiting-1]," + + "stopped=[stopped-1]" + + "]" + ) + ) + ); + } + + public void testToString_EmptyCollections() { + Set executorNodes = Set.of(); + Set assigned = Set.of(); + Set waitingForAssignment = Set.of(); + Set stopped = Set.of(); + + TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped); + + assertThat( + assignments.toString(), + is( + equalTo( + "TransformNodeAssignments[" + "executorNodes=[]," + "assigned=[]," + "waitingForAssignment=[]," + "stopped=[]" + "]" + ) + ) + ); + } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java index 1a2bbfd434455..77b290e015d9a 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java @@ -18,6 +18,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.only; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -236,4 +237,67 @@ public void testCancelRetryImmediately() { assertFalse("Retries should not be scheduled.", retryResult.get()); verify(context, only()).resetStartUpFailureCount(); } + + /** + * Given triggered has been called + * When we call trigger a second time + * And the first call has not finished + * Then we should not take any action + * + * Given the first call has finished + * When we call trigger a third time + * Then we should successfully call the action + */ + public void testRunOneAtATime() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var savedListener = new AtomicReference>(); + Consumer> action = l -> { + if (savedListener.compareAndSet(null, l) == false) { + fail("Action should only be called once."); + } + }; + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + action, + responseListener(responseResult), + retryListener(retryResult), + () -> true, + context + ); + + callThreeTimes("transformId", listener); + + // verify the action has been called + assertNotNull(savedListener.get()); + + // assert the listener has not been called yet + assertEquals("Response Listener should never be called once.", 0, responseResult.get()); + assertNull("Retry Listener should not be called.", retryResult.get()); + verifyNoInteractions(context); + + savedListener.get().onFailure(new IllegalStateException("first call fails")); + + // assert only 1 retry and 0 success + assertEquals("Response Listener should only be called once.", 0, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertTrue("Retries should be scheduled.", retryResult.get()); + verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class)); + verify(context, never()).resetStartUpFailureCount(); + + // rerun and succeed + savedListener.set(null); + callThreeTimes("transformId", listener); + savedListener.get().onResponse(null); + + // assert only 1 retry and 1 failure + assertEquals("Response Listener should only be 
@@ -236,4 +237,67 @@ public void testCancelRetryImmediately() {
         assertFalse("Retries should not be scheduled.", retryResult.get());
         verify(context, only()).resetStartUpFailureCount();
     }
+
+    /**
+     * Given triggered has been called
+     * When we call trigger a second time
+     * And the first call has not finished
+     * Then we should not take any action
+     *
+     * Given the first call has finished
+     * When we call trigger a third time
+     * Then we should successfully call the action
+     */
+    public void testRunOneAtATime() {
+        var retryResult = new AtomicReference<Boolean>();
+        var responseResult = new AtomicInteger(0);
+        var context = mock(TransformContext.class);
+
+        var savedListener = new AtomicReference<ActionListener<Void>>();
+        Consumer<ActionListener<Void>> action = l -> {
+            if (savedListener.compareAndSet(null, l) == false) {
+                fail("Action should only be called once.");
+            }
+        };
+
+        var listener = new TransformRetryableStartUpListener<>(
+            "transformId",
+            action,
+            responseListener(responseResult),
+            retryListener(retryResult),
+            () -> true,
+            context
+        );
+
+        callThreeTimes("transformId", listener);
+
+        // verify the action has been called
+        assertNotNull(savedListener.get());
+
+        // assert the listener has not been called yet
+        assertEquals("Response Listener should not be called yet.", 0, responseResult.get());
+        assertNull("Retry Listener should not be called.", retryResult.get());
+        verifyNoInteractions(context);
+
+        savedListener.get().onFailure(new IllegalStateException("first call fails"));
+
+        // assert only 1 retry and 0 successes
+        assertEquals("Response Listener should only be called once.", 0, responseResult.get());
+        assertNotNull("Retry Listener should be called.", retryResult.get());
+        assertTrue("Retries should be scheduled.", retryResult.get());
+        verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class));
+        verify(context, never()).resetStartUpFailureCount();
+
+        // rerun and succeed
+        savedListener.set(null);
+        callThreeTimes("transformId", listener);
+        savedListener.get().onResponse(null);
+
+        // assert only 1 retry and 1 success
+        assertEquals("Response Listener should only be called once.", 1, responseResult.get());
+        assertNotNull("Retry Listener should be called.", retryResult.get());
+        assertTrue("Retries should be scheduled.", retryResult.get());
+        verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class));
+        verify(context, times(1)).resetStartUpFailureCount();
+    }
 }
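`callThreeTimes(...)` is defined elsewhere in this test class and is not part of the hunk; its body here is a guess from the assertions, which expect the second and third events to be no-ops while the action is in flight. A hypothetical reconstruction, assuming `TransformScheduler.Event` is the `(transformId, triggeredTime, scheduledTime)` event the production listener consumes in `triggered(...)`:

```java
// Hypothetical reconstruction -- the real helper lives outside this hunk.
private void callThreeTimes(String transformId, TransformRetryableStartUpListener<?> listener) {
    long now = System.currentTimeMillis();
    // only the first event should start the action; the other two must be no-ops
    listener.triggered(new TransformScheduler.Event(transformId, now, now));
    listener.triggered(new TransformScheduler.Event(transformId, now, now));
    listener.triggered(new TransformScheduler.Event(transformId, now, now));
}
```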