diff --git a/.buildkite/.editorconfig b/.buildkite/.editorconfig new file mode 100644 index 0000000000000..a08b2c5600ad8 --- /dev/null +++ b/.buildkite/.editorconfig @@ -0,0 +1,2 @@ +[*.ts] +max_line_length = 120 diff --git a/.buildkite/.gitignore b/.buildkite/.gitignore new file mode 100644 index 0000000000000..f81d56eaa35f6 --- /dev/null +++ b/.buildkite/.gitignore @@ -0,0 +1,169 @@ +# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore + +# Logs + +logs +_.log +npm-debug.log_ +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) + +report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json + +# Runtime data + +pids +_.pid +_.seed +\*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover + +lib-cov + +# Coverage directory used by tools like istanbul + +coverage +\*.lcov + +# nyc test coverage + +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) + +.grunt + +# Bower dependency directory (https://bower.io/) + +bower_components + +# node-waf configuration + +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) + +build/Release + +# Dependency directories + +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) + +web_modules/ + +# TypeScript cache + +\*.tsbuildinfo + +# Optional npm cache directory + +.npm + +# Optional eslint cache + +.eslintcache + +# Optional stylelint cache + +.stylelintcache + +# Microbundle cache + +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history + +.node_repl_history + +# Output of 'npm pack' + +\*.tgz + +# Yarn Integrity file + +.yarn-integrity + +# dotenv environment variable files + +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) + +.cache +.parcel-cache + +# Next.js build output + +.next +out 
+ +# Nuxt.js build / generate output + +.nuxt +dist + +# Gatsby files + +.cache/ + +# Comment in the public line in if your project uses Gatsby and not Next.js + +# https://nextjs.org/blog/next-9-1#public-directory-support + +# public + +# vuepress build output + +.vuepress/dist + +# vuepress v2.x temp and cache directory + +.temp +.cache + +# Docusaurus cache and generated files + +.docusaurus + +# Serverless directories + +.serverless/ + +# FuseBox cache + +.fusebox/ + +# DynamoDB Local files + +.dynamodb/ + +# TernJS port file + +.tern-port + +# Stores VSCode versions used for testing VSCode extensions + +.vscode-test + +# yarn v2 + +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.\* diff --git a/.buildkite/bun.lockb b/.buildkite/bun.lockb new file mode 100755 index 0000000000000..54920b41d665c Binary files /dev/null and b/.buildkite/bun.lockb differ diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index 3d20e3fb73b8e..40fb970a76196 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -48,7 +48,7 @@ BUILDKITE_API_TOKEN=$(vault read -field=token secret/ci/elastic-elasticsearch/bu export BUILDKITE_API_TOKEN if [[ "${USE_LUCENE_SNAPSHOT_CREDS:-}" == "true" ]]; then - data=$(.buildkite/scripts/lucene-snapshot/get-credentials.sh) + data=$(.buildkite/scripts/get-legacy-secret.sh aws-elastic/creds/lucene-snapshots) AWS_ACCESS_KEY_ID=$(echo "$data" | jq -r .data.access_key) export AWS_ACCESS_KEY_ID @@ -70,12 +70,38 @@ if [[ "${USE_DRA_CREDENTIALS:-}" == "true" ]]; then export DRA_VAULT_ADDR fi +source .buildkite/scripts/third-party-test-credentials.sh + if [[ "${USE_SNYK_CREDENTIALS:-}" == "true" ]]; then SNYK_TOKEN=$(vault read -field=token secret/ci/elastic-elasticsearch/migrated/snyk) export SNYK_TOKEN fi +if [[ "${USE_PROD_DOCKER_CREDENTIALS:-}" == "true" ]]; then + DOCKER_REGISTRY_USERNAME="$(vault read -field=username 
secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)" + export DOCKER_REGISTRY_USERNAME + + DOCKER_REGISTRY_PASSWORD="$(vault read -field=password secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)" + export DOCKER_REGISTRY_PASSWORD +fi + if [[ "$BUILDKITE_AGENT_META_DATA_PROVIDER" != *"k8s"* ]]; then # Run in the background, while the job continues nohup .buildkite/scripts/setup-monitoring.sh /dev/null 2>&1 & fi + +# Initialize the build scan and gobld annotations with empty/open
tags +# This ensures that they are collapsible when they get appended to +if [[ "${BUILDKITE_LABEL:-}" == *"Pipeline upload"* ]]; then + cat << EOF | buildkite-agent annotate --context "gradle-build-scans" --style "info" +
+ +Gradle build scan links +EOF + + cat << EOF | buildkite-agent annotate --context "ctx-gobld-metrics" --style "info" +
+ +Agent information from gobld +EOF +fi diff --git a/.buildkite/package.json b/.buildkite/package.json new file mode 100644 index 0000000000000..c13d5f10fdf60 --- /dev/null +++ b/.buildkite/package.json @@ -0,0 +1,13 @@ +{ + "name": "buildkite-pipelines", + "module": "index.ts", + "type": "module", + "devDependencies": { + "@types/node": "^20.6.0", + "bun-types": "latest", + "yaml": "^2.3.2" + }, + "peerDependencies": { + "typescript": "^5.0.0" + } +} diff --git a/.buildkite/pipelines/periodic-packaging.bwc.template.yml b/.buildkite/pipelines/periodic-packaging.bwc.template.yml index 0ec7721381d07..b06bc80d3535d 100644 --- a/.buildkite/pipelines/periodic-packaging.bwc.template.yml +++ b/.buildkite/pipelines/periodic-packaging.bwc.template.yml @@ -1,5 +1,5 @@ - label: "{{matrix.image}} / $BWC_VERSION / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v$BWC_VERSION + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v$BWC_VERSION timeout_in_minutes: 300 matrix: setup: diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml index 1f1852639e997..1c626ffc53bfe 100644 --- a/.buildkite/pipelines/periodic-packaging.template.yml +++ b/.buildkite/pipelines/periodic-packaging.template.yml @@ -2,7 +2,7 @@ steps: - group: packaging-tests-unix steps: - label: "{{matrix.image}} / packaging-tests-unix" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ destructivePackagingTest + command: ./.ci/scripts/packaging-test.sh destructivePackagingTest timeout_in_minutes: 300 matrix: setup: diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index ce0746a5726cc..6a2492fb03ef9 
100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -3,7 +3,7 @@ steps: - group: packaging-tests-unix steps: - label: "{{matrix.image}} / packaging-tests-unix" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ destructivePackagingTest + command: ./.ci/scripts/packaging-test.sh destructivePackagingTest timeout_in_minutes: 300 matrix: setup: @@ -33,7 +33,7 @@ steps: - group: packaging-tests-upgrade steps: - label: "{{matrix.image}} / 7.0.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.0.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.0.0 timeout_in_minutes: 300 matrix: setup: @@ -49,7 +49,7 @@ steps: BWC_VERSION: 7.0.0 - label: "{{matrix.image}} / 7.0.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.0.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.0.1 timeout_in_minutes: 300 matrix: setup: @@ -65,7 +65,7 @@ steps: BWC_VERSION: 7.0.1 - label: "{{matrix.image}} / 7.1.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.1.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.1.0 timeout_in_minutes: 300 matrix: setup: @@ -81,7 +81,7 @@ steps: BWC_VERSION: 7.1.0 - label: "{{matrix.image}} / 7.1.1 / packaging-tests-upgrade" - command: 
./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.1.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.1.1 timeout_in_minutes: 300 matrix: setup: @@ -97,7 +97,7 @@ steps: BWC_VERSION: 7.1.1 - label: "{{matrix.image}} / 7.2.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.2.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.2.0 timeout_in_minutes: 300 matrix: setup: @@ -113,7 +113,7 @@ steps: BWC_VERSION: 7.2.0 - label: "{{matrix.image}} / 7.2.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.2.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.2.1 timeout_in_minutes: 300 matrix: setup: @@ -129,7 +129,7 @@ steps: BWC_VERSION: 7.2.1 - label: "{{matrix.image}} / 7.3.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.0 timeout_in_minutes: 300 matrix: setup: @@ -145,7 +145,7 @@ steps: BWC_VERSION: 7.3.0 - label: "{{matrix.image}} / 7.3.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.1 + 
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.1 timeout_in_minutes: 300 matrix: setup: @@ -161,7 +161,7 @@ steps: BWC_VERSION: 7.3.1 - label: "{{matrix.image}} / 7.3.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.2 timeout_in_minutes: 300 matrix: setup: @@ -177,7 +177,7 @@ steps: BWC_VERSION: 7.3.2 - label: "{{matrix.image}} / 7.4.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.0 timeout_in_minutes: 300 matrix: setup: @@ -193,7 +193,7 @@ steps: BWC_VERSION: 7.4.0 - label: "{{matrix.image}} / 7.4.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.1 timeout_in_minutes: 300 matrix: setup: @@ -209,7 +209,7 @@ steps: BWC_VERSION: 7.4.1 - label: "{{matrix.image}} / 7.4.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.2 timeout_in_minutes: 300 matrix: setup: @@ -225,7 +225,7 @@ steps: BWC_VERSION: 7.4.2 - 
label: "{{matrix.image}} / 7.5.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.0 timeout_in_minutes: 300 matrix: setup: @@ -241,7 +241,7 @@ steps: BWC_VERSION: 7.5.0 - label: "{{matrix.image}} / 7.5.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.1 timeout_in_minutes: 300 matrix: setup: @@ -257,7 +257,7 @@ steps: BWC_VERSION: 7.5.1 - label: "{{matrix.image}} / 7.5.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.2 timeout_in_minutes: 300 matrix: setup: @@ -273,7 +273,7 @@ steps: BWC_VERSION: 7.5.2 - label: "{{matrix.image}} / 7.6.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.0 timeout_in_minutes: 300 matrix: setup: @@ -289,7 +289,7 @@ steps: BWC_VERSION: 7.6.0 - label: "{{matrix.image}} / 7.6.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache 
-Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.1 timeout_in_minutes: 300 matrix: setup: @@ -305,7 +305,7 @@ steps: BWC_VERSION: 7.6.1 - label: "{{matrix.image}} / 7.6.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.2 timeout_in_minutes: 300 matrix: setup: @@ -321,7 +321,7 @@ steps: BWC_VERSION: 7.6.2 - label: "{{matrix.image}} / 7.7.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.7.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.7.0 timeout_in_minutes: 300 matrix: setup: @@ -337,7 +337,7 @@ steps: BWC_VERSION: 7.7.0 - label: "{{matrix.image}} / 7.7.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.7.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.7.1 timeout_in_minutes: 300 matrix: setup: @@ -353,7 +353,7 @@ steps: BWC_VERSION: 7.7.1 - label: "{{matrix.image}} / 7.8.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.8.0 + command: ./.ci/scripts/packaging-test.sh 
-Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.8.0 timeout_in_minutes: 300 matrix: setup: @@ -369,7 +369,7 @@ steps: BWC_VERSION: 7.8.0 - label: "{{matrix.image}} / 7.8.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.8.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.8.1 timeout_in_minutes: 300 matrix: setup: @@ -385,7 +385,7 @@ steps: BWC_VERSION: 7.8.1 - label: "{{matrix.image}} / 7.9.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.0 timeout_in_minutes: 300 matrix: setup: @@ -401,7 +401,7 @@ steps: BWC_VERSION: 7.9.0 - label: "{{matrix.image}} / 7.9.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.1 timeout_in_minutes: 300 matrix: setup: @@ -417,7 +417,7 @@ steps: BWC_VERSION: 7.9.1 - label: "{{matrix.image}} / 7.9.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.2 timeout_in_minutes: 300 matrix: setup: @@ -433,7 +433,7 @@ steps: BWC_VERSION: 7.9.2 - label: "{{matrix.image}} / 7.9.3 / 
packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.3 timeout_in_minutes: 300 matrix: setup: @@ -449,7 +449,7 @@ steps: BWC_VERSION: 7.9.3 - label: "{{matrix.image}} / 7.10.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.0 timeout_in_minutes: 300 matrix: setup: @@ -465,7 +465,7 @@ steps: BWC_VERSION: 7.10.0 - label: "{{matrix.image}} / 7.10.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.1 timeout_in_minutes: 300 matrix: setup: @@ -481,7 +481,7 @@ steps: BWC_VERSION: 7.10.1 - label: "{{matrix.image}} / 7.10.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.2 timeout_in_minutes: 300 matrix: setup: @@ -497,7 +497,7 @@ steps: BWC_VERSION: 7.10.2 - label: "{{matrix.image}} / 7.11.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ 
-Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.0 timeout_in_minutes: 300 matrix: setup: @@ -513,7 +513,7 @@ steps: BWC_VERSION: 7.11.0 - label: "{{matrix.image}} / 7.11.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.1 timeout_in_minutes: 300 matrix: setup: @@ -529,7 +529,7 @@ steps: BWC_VERSION: 7.11.1 - label: "{{matrix.image}} / 7.11.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.2 timeout_in_minutes: 300 matrix: setup: @@ -545,7 +545,7 @@ steps: BWC_VERSION: 7.11.2 - label: "{{matrix.image}} / 7.12.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.12.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.12.0 timeout_in_minutes: 300 matrix: setup: @@ -561,7 +561,7 @@ steps: BWC_VERSION: 7.12.0 - label: "{{matrix.image}} / 7.12.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.12.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.12.1 
timeout_in_minutes: 300 matrix: setup: @@ -577,7 +577,7 @@ steps: BWC_VERSION: 7.12.1 - label: "{{matrix.image}} / 7.13.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.0 timeout_in_minutes: 300 matrix: setup: @@ -593,7 +593,7 @@ steps: BWC_VERSION: 7.13.0 - label: "{{matrix.image}} / 7.13.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.1 timeout_in_minutes: 300 matrix: setup: @@ -609,7 +609,7 @@ steps: BWC_VERSION: 7.13.1 - label: "{{matrix.image}} / 7.13.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.2 timeout_in_minutes: 300 matrix: setup: @@ -625,7 +625,7 @@ steps: BWC_VERSION: 7.13.2 - label: "{{matrix.image}} / 7.13.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.3 timeout_in_minutes: 300 matrix: setup: @@ -641,7 +641,7 @@ steps: BWC_VERSION: 7.13.3 - label: "{{matrix.image}} / 7.13.4 / packaging-tests-upgrade" - command: 
./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.4 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.4 timeout_in_minutes: 300 matrix: setup: @@ -657,7 +657,7 @@ steps: BWC_VERSION: 7.13.4 - label: "{{matrix.image}} / 7.14.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.0 timeout_in_minutes: 300 matrix: setup: @@ -673,7 +673,7 @@ steps: BWC_VERSION: 7.14.0 - label: "{{matrix.image}} / 7.14.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.1 timeout_in_minutes: 300 matrix: setup: @@ -689,7 +689,7 @@ steps: BWC_VERSION: 7.14.1 - label: "{{matrix.image}} / 7.14.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.2 timeout_in_minutes: 300 matrix: setup: @@ -705,7 +705,7 @@ steps: BWC_VERSION: 7.14.2 - label: "{{matrix.image}} / 7.15.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true 
destructiveDistroUpgradeTest.v7.15.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.0 timeout_in_minutes: 300 matrix: setup: @@ -721,7 +721,7 @@ steps: BWC_VERSION: 7.15.0 - label: "{{matrix.image}} / 7.15.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.1 timeout_in_minutes: 300 matrix: setup: @@ -737,7 +737,7 @@ steps: BWC_VERSION: 7.15.1 - label: "{{matrix.image}} / 7.15.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.2 timeout_in_minutes: 300 matrix: setup: @@ -753,7 +753,7 @@ steps: BWC_VERSION: 7.15.2 - label: "{{matrix.image}} / 7.16.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.0 timeout_in_minutes: 300 matrix: setup: @@ -769,7 +769,7 @@ steps: BWC_VERSION: 7.16.0 - label: "{{matrix.image}} / 7.16.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.1 timeout_in_minutes: 300 matrix: 
setup: @@ -785,7 +785,7 @@ steps: BWC_VERSION: 7.16.1 - label: "{{matrix.image}} / 7.16.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.2 timeout_in_minutes: 300 matrix: setup: @@ -801,7 +801,7 @@ steps: BWC_VERSION: 7.16.2 - label: "{{matrix.image}} / 7.16.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.3 timeout_in_minutes: 300 matrix: setup: @@ -817,7 +817,7 @@ steps: BWC_VERSION: 7.16.3 - label: "{{matrix.image}} / 7.17.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.0 timeout_in_minutes: 300 matrix: setup: @@ -833,7 +833,7 @@ steps: BWC_VERSION: 7.17.0 - label: "{{matrix.image}} / 7.17.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.1 timeout_in_minutes: 300 matrix: setup: @@ -849,7 +849,7 @@ steps: BWC_VERSION: 7.17.1 - label: "{{matrix.image}} / 7.17.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache 
-Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.2 timeout_in_minutes: 300 matrix: setup: @@ -865,7 +865,7 @@ steps: BWC_VERSION: 7.17.2 - label: "{{matrix.image}} / 7.17.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.3 timeout_in_minutes: 300 matrix: setup: @@ -881,7 +881,7 @@ steps: BWC_VERSION: 7.17.3 - label: "{{matrix.image}} / 7.17.4 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.4 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.4 timeout_in_minutes: 300 matrix: setup: @@ -897,7 +897,7 @@ steps: BWC_VERSION: 7.17.4 - label: "{{matrix.image}} / 7.17.5 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.5 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.5 timeout_in_minutes: 300 matrix: setup: @@ -913,7 +913,7 @@ steps: BWC_VERSION: 7.17.5 - label: "{{matrix.image}} / 7.17.6 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.6 + command: 
./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.6 timeout_in_minutes: 300 matrix: setup: @@ -929,7 +929,7 @@ steps: BWC_VERSION: 7.17.6 - label: "{{matrix.image}} / 7.17.7 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.7 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.7 timeout_in_minutes: 300 matrix: setup: @@ -945,7 +945,7 @@ steps: BWC_VERSION: 7.17.7 - label: "{{matrix.image}} / 7.17.8 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.8 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.8 timeout_in_minutes: 300 matrix: setup: @@ -961,7 +961,7 @@ steps: BWC_VERSION: 7.17.8 - label: "{{matrix.image}} / 7.17.9 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.9 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.9 timeout_in_minutes: 300 matrix: setup: @@ -977,7 +977,7 @@ steps: BWC_VERSION: 7.17.9 - label: "{{matrix.image}} / 7.17.10 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.10 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.10 timeout_in_minutes: 300 matrix: setup: @@ -993,7 +993,7 @@ steps: BWC_VERSION: 
7.17.10 - label: "{{matrix.image}} / 7.17.11 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.11 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.11 timeout_in_minutes: 300 matrix: setup: @@ -1009,7 +1009,7 @@ steps: BWC_VERSION: 7.17.11 - label: "{{matrix.image}} / 7.17.12 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.12 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.12 timeout_in_minutes: 300 matrix: setup: @@ -1025,7 +1025,7 @@ steps: BWC_VERSION: 7.17.12 - label: "{{matrix.image}} / 7.17.13 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.13 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.13 timeout_in_minutes: 300 matrix: setup: @@ -1041,7 +1041,7 @@ steps: BWC_VERSION: 7.17.13 - label: "{{matrix.image}} / 7.17.14 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.14 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.14 timeout_in_minutes: 300 matrix: setup: @@ -1057,7 +1057,7 @@ steps: BWC_VERSION: 7.17.14 - label: "{{matrix.image}} / 8.0.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache 
-Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0 timeout_in_minutes: 300 matrix: setup: @@ -1073,7 +1073,7 @@ steps: BWC_VERSION: 8.0.0 - label: "{{matrix.image}} / 8.0.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.1 timeout_in_minutes: 300 matrix: setup: @@ -1089,7 +1089,7 @@ steps: BWC_VERSION: 8.0.1 - label: "{{matrix.image}} / 8.1.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.0 timeout_in_minutes: 300 matrix: setup: @@ -1105,7 +1105,7 @@ steps: BWC_VERSION: 8.1.0 - label: "{{matrix.image}} / 8.1.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.1 timeout_in_minutes: 300 matrix: setup: @@ -1121,7 +1121,7 @@ steps: BWC_VERSION: 8.1.1 - label: "{{matrix.image}} / 8.1.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.2 + command: ./.ci/scripts/packaging-test.sh 
-Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.2 timeout_in_minutes: 300 matrix: setup: @@ -1137,7 +1137,7 @@ steps: BWC_VERSION: 8.1.2 - label: "{{matrix.image}} / 8.1.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.3 timeout_in_minutes: 300 matrix: setup: @@ -1153,7 +1153,7 @@ steps: BWC_VERSION: 8.1.3 - label: "{{matrix.image}} / 8.2.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.0 timeout_in_minutes: 300 matrix: setup: @@ -1169,7 +1169,7 @@ steps: BWC_VERSION: 8.2.0 - label: "{{matrix.image}} / 8.2.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.1 timeout_in_minutes: 300 matrix: setup: @@ -1185,7 +1185,7 @@ steps: BWC_VERSION: 8.2.1 - label: "{{matrix.image}} / 8.2.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.2 timeout_in_minutes: 300 matrix: setup: @@ -1201,7 +1201,7 @@ steps: BWC_VERSION: 8.2.2 - label: "{{matrix.image}} / 8.2.3 / 
packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.3 timeout_in_minutes: 300 matrix: setup: @@ -1217,7 +1217,7 @@ steps: BWC_VERSION: 8.2.3 - label: "{{matrix.image}} / 8.3.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.0 timeout_in_minutes: 300 matrix: setup: @@ -1233,7 +1233,7 @@ steps: BWC_VERSION: 8.3.0 - label: "{{matrix.image}} / 8.3.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.1 timeout_in_minutes: 300 matrix: setup: @@ -1249,7 +1249,7 @@ steps: BWC_VERSION: 8.3.1 - label: "{{matrix.image}} / 8.3.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.2 timeout_in_minutes: 300 matrix: setup: @@ -1265,7 +1265,7 @@ steps: BWC_VERSION: 8.3.2 - label: "{{matrix.image}} / 8.3.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true 
destructiveDistroUpgradeTest.v8.3.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.3 timeout_in_minutes: 300 matrix: setup: @@ -1281,7 +1281,7 @@ steps: BWC_VERSION: 8.3.3 - label: "{{matrix.image}} / 8.4.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.0 timeout_in_minutes: 300 matrix: setup: @@ -1297,7 +1297,7 @@ steps: BWC_VERSION: 8.4.0 - label: "{{matrix.image}} / 8.4.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.1 timeout_in_minutes: 300 matrix: setup: @@ -1313,7 +1313,7 @@ steps: BWC_VERSION: 8.4.1 - label: "{{matrix.image}} / 8.4.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.2 timeout_in_minutes: 300 matrix: setup: @@ -1329,7 +1329,7 @@ steps: BWC_VERSION: 8.4.2 - label: "{{matrix.image}} / 8.4.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.3 timeout_in_minutes: 300 matrix: setup: @@ 
-1345,7 +1345,7 @@ steps: BWC_VERSION: 8.4.3 - label: "{{matrix.image}} / 8.5.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.0 timeout_in_minutes: 300 matrix: setup: @@ -1361,7 +1361,7 @@ steps: BWC_VERSION: 8.5.0 - label: "{{matrix.image}} / 8.5.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.1 timeout_in_minutes: 300 matrix: setup: @@ -1377,7 +1377,7 @@ steps: BWC_VERSION: 8.5.1 - label: "{{matrix.image}} / 8.5.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.2 timeout_in_minutes: 300 matrix: setup: @@ -1393,7 +1393,7 @@ steps: BWC_VERSION: 8.5.2 - label: "{{matrix.image}} / 8.5.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.3 timeout_in_minutes: 300 matrix: setup: @@ -1409,7 +1409,7 @@ steps: BWC_VERSION: 8.5.3 - label: "{{matrix.image}} / 8.6.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache 
-Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.0 timeout_in_minutes: 300 matrix: setup: @@ -1425,7 +1425,7 @@ steps: BWC_VERSION: 8.6.0 - label: "{{matrix.image}} / 8.6.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.1 timeout_in_minutes: 300 matrix: setup: @@ -1441,7 +1441,7 @@ steps: BWC_VERSION: 8.6.1 - label: "{{matrix.image}} / 8.6.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.2 timeout_in_minutes: 300 matrix: setup: @@ -1457,7 +1457,7 @@ steps: BWC_VERSION: 8.6.2 - label: "{{matrix.image}} / 8.7.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.7.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.7.0 timeout_in_minutes: 300 matrix: setup: @@ -1473,7 +1473,7 @@ steps: BWC_VERSION: 8.7.0 - label: "{{matrix.image}} / 8.7.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.7.1 + command: ./.ci/scripts/packaging-test.sh 
-Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.7.1 timeout_in_minutes: 300 matrix: setup: @@ -1489,7 +1489,7 @@ steps: BWC_VERSION: 8.7.1 - label: "{{matrix.image}} / 8.8.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.0 timeout_in_minutes: 300 matrix: setup: @@ -1505,7 +1505,7 @@ steps: BWC_VERSION: 8.8.0 - label: "{{matrix.image}} / 8.8.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.1 timeout_in_minutes: 300 matrix: setup: @@ -1521,7 +1521,7 @@ steps: BWC_VERSION: 8.8.1 - label: "{{matrix.image}} / 8.8.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.2 timeout_in_minutes: 300 matrix: setup: @@ -1537,7 +1537,7 @@ steps: BWC_VERSION: 8.8.2 - label: "{{matrix.image}} / 8.9.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.0 timeout_in_minutes: 300 matrix: setup: @@ -1553,7 +1553,7 @@ steps: BWC_VERSION: 8.9.0 - label: "{{matrix.image}} / 8.9.1 / 
packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.1 timeout_in_minutes: 300 matrix: setup: @@ -1569,7 +1569,7 @@ steps: BWC_VERSION: 8.9.1 - label: "{{matrix.image}} / 8.9.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.2 timeout_in_minutes: 300 matrix: setup: @@ -1585,7 +1585,7 @@ steps: BWC_VERSION: 8.9.2 - label: "{{matrix.image}} / 8.10.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.0 timeout_in_minutes: 300 matrix: setup: @@ -1601,7 +1601,7 @@ steps: BWC_VERSION: 8.10.0 - label: "{{matrix.image}} / 8.10.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.1 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.1 timeout_in_minutes: 300 matrix: setup: @@ -1617,7 +1617,7 @@ steps: BWC_VERSION: 8.10.1 - label: "{{matrix.image}} / 8.10.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ 
-Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.2 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.2 timeout_in_minutes: 300 matrix: setup: @@ -1633,7 +1633,7 @@ steps: BWC_VERSION: 8.10.2 - label: "{{matrix.image}} / 8.10.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.3 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.3 timeout_in_minutes: 300 matrix: setup: @@ -1649,7 +1649,7 @@ steps: BWC_VERSION: 8.10.3 - label: "{{matrix.image}} / 8.11.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.11.0 + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.11.0 timeout_in_minutes: 300 matrix: setup: diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index 8522ead742768..08c9fda4c9a6b 100644 --- a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -26,8 +26,9 @@ steps: agents: provider: gcp image: family/elasticsearch-{{matrix.image}} - diskSizeGb: 350 - machineType: n1-standard-32 + localSsds: 1 + localSsdInterface: nvme + machineType: custom-32-98304 env: {} - group: platform-support-windows steps: diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index e4f844afc3f41..ec3ae76ffcdfb 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -103,6 +103,73 @@ steps: image: family/elasticsearch-ubuntu-2004 diskSizeGb: 350 machineType: 
custom-32-98304 + - group: third-party tests + steps: + - label: third-party / azure-sas + command: | + export azure_storage_container=elasticsearch-ci-thirdparty-sas + export azure_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh azureThirdPartyTest + env: + USE_3RD_PARTY_AZURE_SAS_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / azure + command: | + export azure_storage_container=elasticsearch-ci-thirdparty + export azure_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh azureThirdPartyTest + env: + USE_3RD_PARTY_AZURE_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / gcs + command: | + export google_storage_bucket=elasticsearch-ci-thirdparty + export google_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh gcsThirdPartyTest + env: + USE_3RD_PARTY_GCS_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / geoip + command: | + .ci/scripts/run-gradle.sh :modules:ingest-geoip:internalClusterTest -Dtests.jvm.argline="-Dgeoip_use_service=true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / s3 + command: | + export amazon_s3_bucket=elasticsearch-ci.us-west-2 + export amazon_s3_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh s3ThirdPartyTest + env: + USE_3RD_PARTY_S3_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk - label: Upload Snyk 
Dependency Graph command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph -PsnykTargetReference=$BUILDKITE_BRANCH env: diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 967dcbb8cf535..f0a3cfdfd1f50 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -1124,6 +1124,72 @@ steps: image: family/elasticsearch-ubuntu-2004 diskSizeGb: 350 machineType: custom-32-98304 + - group: third-party tests + steps: + - label: third-party / azure-sas + command: | + export azure_storage_container=elasticsearch-ci-thirdparty-sas + export azure_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh azureThirdPartyTest + env: + USE_3RD_PARTY_AZURE_SAS_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / azure + command: | + export azure_storage_container=elasticsearch-ci-thirdparty + export azure_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh azureThirdPartyTest + env: + USE_3RD_PARTY_AZURE_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / gcs + command: | + export google_storage_bucket=elasticsearch-ci-thirdparty + export google_storage_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh gcsThirdPartyTest + env: + USE_3RD_PARTY_GCS_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk + - label: third-party / geoip + command: | + .ci/scripts/run-gradle.sh :modules:ingest-geoip:internalClusterTest -Dtests.jvm.argline="-Dgeoip_use_service=true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + - 
label: third-party / s3 + command: | + export amazon_s3_bucket=elasticsearch-ci.us-west-2 + export amazon_s3_base_path=$BUILDKITE_BRANCH + + .ci/scripts/run-gradle.sh s3ThirdPartyTest + env: + USE_3RD_PARTY_S3_CREDENTIALS: "true" + timeout_in_minutes: 30 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n2-standard-8 + buildDirectory: /dev/shm/bk - label: Upload Snyk Dependency Graph command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph -PsnykTargetReference=$BUILDKITE_BRANCH env: diff --git a/.buildkite/pipelines/pull-request/.defaults.yml b/.buildkite/pipelines/pull-request/.defaults.yml new file mode 100644 index 0000000000000..84d73cbd738a2 --- /dev/null +++ b/.buildkite/pipelines/pull-request/.defaults.yml @@ -0,0 +1,6 @@ +config: + skip-labels: ">test-mute" + excluded-regions: + - ^docs/.* + - ^x-pack/docs/.* +# Note that there is also a trigger-phrase default inside pull-request.ts (it's dynamic based on the name of each pipeline file) diff --git a/.buildkite/pipelines/pull-request/build-benchmark.yml b/.buildkite/pipelines/pull-request/build-benchmark.yml new file mode 100644 index 0000000000000..8d3215b8393ce --- /dev/null +++ b/.buildkite/pipelines/pull-request/build-benchmark.yml @@ -0,0 +1,24 @@ +config: + allow-labels: build-benchmark + trigger-phrase: '.*run\W+elasticsearch-ci/build-bench.*' +steps: + - label: build-benchmark / {{matrix.part}} + key: "build-benchmark" + command: | + .ci/scripts/run-gradle.sh :build-tools-internal:bootstrapPerformanceTests + .ci/scripts/install-gradle-profiler.sh + .ci/scripts/run-gradle-profiler.sh --benchmark --scenario-file build-tools-internal/build/performanceTests/elasticsearch-build-benchmark-{{matrix.part}}.scenarios --project-dir . 
--output-dir profile-out + mkdir build + tar -czf build/$BUILDKITE_BUILD_NUMBER.tar.bz2 profile-out + matrix: + setup: + part: + - part1 + - part2 + env: + BUILD_PERFORMANCE_TEST: "true" + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/bwc-snapshots-windows.yml b/.buildkite/pipelines/pull-request/bwc-snapshots-windows.yml new file mode 100644 index 0000000000000..d37bdf380f926 --- /dev/null +++ b/.buildkite/pipelines/pull-request/bwc-snapshots-windows.yml @@ -0,0 +1,20 @@ +config: + allow-labels: test-windows +steps: + - group: bwc-snapshots-windows + steps: + - label: "{{matrix.BWC_VERSION}} / bwc-snapshots-windows" + key: "bwc-snapshots-windows" + command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh + env: + GRADLE_TASK: "v{{matrix.BWC_VERSION}}#bwcTest" + timeout_in_minutes: 300 + matrix: + setup: + BWC_VERSION: $SNAPSHOT_BWC_VERSIONS + agents: + provider: gcp + image: family/elasticsearch-windows-2022 + machineType: custom-32-98304 + diskType: pd-ssd + diskSizeGb: 350 diff --git a/.buildkite/pipelines/pull-request/bwc-snapshots.yml b/.buildkite/pipelines/pull-request/bwc-snapshots.yml new file mode 100644 index 0000000000000..21873475056ea --- /dev/null +++ b/.buildkite/pipelines/pull-request/bwc-snapshots.yml @@ -0,0 +1,20 @@ +config: + trigger-phrase: '.*run\W+elasticsearch-ci/bwc.*' + skip-labels: + - ">test-mute" + - "test-full-bwc" +steps: + - group: bwc-snapshots + steps: + - label: "{{matrix.BWC_VERSION}} / bwc-snapshots" + key: "bwc-snapshots" + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTest + timeout_in_minutes: 300 + matrix: + setup: + BWC_VERSION: $SNAPSHOT_BWC_VERSIONS + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git 
a/.buildkite/pipelines/pull-request/cloud-deploy.yml b/.buildkite/pipelines/pull-request/cloud-deploy.yml new file mode 100644 index 0000000000000..ce8e8206d51ff --- /dev/null +++ b/.buildkite/pipelines/pull-request/cloud-deploy.yml @@ -0,0 +1,13 @@ +config: + allow-labels: cloud-deploy +steps: + - label: cloud-deploy + command: .buildkite/scripts/cloud-deploy.sh + env: + USE_PROD_DOCKER_CREDENTIALS: "true" + timeout_in_minutes: 20 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/docs-check.yml b/.buildkite/pipelines/pull-request/docs-check.yml new file mode 100644 index 0000000000000..2201eb2d1e4ea --- /dev/null +++ b/.buildkite/pipelines/pull-request/docs-check.yml @@ -0,0 +1,14 @@ +config: + included-regions: + - ^docs/.* + - ^x-pack/docs/.* + excluded-regions: [] +steps: + - label: docs-check + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed precommit :docs:check + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/eql-correctness.yml b/.buildkite/pipelines/pull-request/eql-correctness.yml new file mode 100644 index 0000000000000..8f7ca6942c0e9 --- /dev/null +++ b/.buildkite/pipelines/pull-request/eql-correctness.yml @@ -0,0 +1,9 @@ +steps: + - label: eql-correctness + command: .buildkite/scripts/eql-correctness.sh + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/example-plugins.yml b/.buildkite/pipelines/pull-request/example-plugins.yml new file mode 100644 index 0000000000000..18d0de6594980 --- /dev/null +++ b/.buildkite/pipelines/pull-request/example-plugins.yml @@ -0,0 +1,18 @@ +config: + included-regions: + - 
build-conventions/.* + - build-tools/.* + - build-tools-internal/.* + - plugins/examples/.* +steps: + - label: example-plugins + command: |- + cd $$WORKSPACE/plugins/examples + + $$WORKSPACE/.ci/scripts/run-gradle.sh -Dorg.gradle.jvmargs=-Xmx8g build --include-build $$WORKSPACE + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/full-bwc.yml b/.buildkite/pipelines/pull-request/full-bwc.yml new file mode 100644 index 0000000000000..d3fa8eccaf7d9 --- /dev/null +++ b/.buildkite/pipelines/pull-request/full-bwc.yml @@ -0,0 +1,15 @@ +config: + allow-labels: test-full-bwc +steps: + - group: bwc + steps: + - label: $BWC_VERSION / bwc + key: "full-bwc:$BWC_VERSION_SNAKE" + bwc_template: true + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$BWC_VERSION#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml new file mode 100644 index 0000000000000..98bc61ea33738 --- /dev/null +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml @@ -0,0 +1,27 @@ +config: + skip-labels: + - ">test-mute" + - ":Delivery/Packaging" +steps: + - group: packaging-tests-unix-sample + steps: + - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-unix-sample" + key: "packaging-tests-unix-sample" + command: ./.ci/scripts/packaging-test.sh $$PACKAGING_TASK + timeout_in_minutes: 300 + matrix: + setup: + image: + - rhel-8 + - ubuntu-2004 + PACKAGING_TASK: + - destructiveDistroTest.docker + - destructiveDistroTest.packages + - destructiveDistroTest.archives + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + diskSizeGb: 350 + 
machineType: custom-16-32768 + env: + PACKAGING_TASK: "{{matrix.PACKAGING_TASK}}" diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml new file mode 100644 index 0000000000000..ec2e29b284781 --- /dev/null +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml @@ -0,0 +1,39 @@ +config: + allow-labels: ":Delivery/Packaging" +steps: + - group: packaging-tests-unix + steps: + - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-unix" + key: "packaging-tests-unix" + command: ./.ci/scripts/packaging-test.sh $$PACKAGING_TASK + timeout_in_minutes: 300 + matrix: + setup: + image: + - centos-7 + - debian-10 + - debian-11 + - opensuse-leap-15 + - oraclelinux-7 + - oraclelinux-8 + - sles-12 + - sles-15 + - ubuntu-1804 + - ubuntu-2004 + - ubuntu-2204 + - rocky-8 + - rhel-7 + - rhel-8 + - rhel-9 + - almalinux-8 + PACKAGING_TASK: + - destructiveDistroTest.docker + - destructiveDistroTest.packages + - destructiveDistroTest.archives + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + diskSizeGb: 350 + machineType: custom-16-32768 + env: + PACKAGING_TASK: "{{matrix.PACKAGING_TASK}}" diff --git a/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml b/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml new file mode 100644 index 0000000000000..bcf38f51f4a66 --- /dev/null +++ b/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml @@ -0,0 +1,25 @@ +config: + skip-labels: + - ">test-mute" + - ":Delivery/Packaging" +steps: + - group: packaging-tests-windows-sample + steps: + - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-windows-sample" + key: "packaging-tests-windows-sample" + command: .\.buildkite\scripts\run-script.ps1 .\.ci\scripts\packaging-test.ps1 -GradleTasks destructiveDistroTest.{{matrix.PACKAGING_TASK}} + timeout_in_minutes: 300 + matrix: + setup: + image: + - windows-2019 
+ PACKAGING_TASK: + - default-windows-archive + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-32-98304 + diskType: pd-ssd + diskSizeGb: 350 + env: + PACKAGING_TASK: "{{matrix.PACKAGING_TASK}}" diff --git a/.buildkite/pipelines/pull-request/packaging-tests-windows.yml b/.buildkite/pipelines/pull-request/packaging-tests-windows.yml new file mode 100644 index 0000000000000..651a82982460f --- /dev/null +++ b/.buildkite/pipelines/pull-request/packaging-tests-windows.yml @@ -0,0 +1,25 @@ +config: + allow-labels: ":Delivery/Packaging" +steps: + - group: packaging-tests-windows + steps: + - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-windows" + key: "packaging-tests-windows" + command: .\.buildkite\scripts\run-script.ps1 .\.ci\scripts\packaging-test.ps1 -GradleTasks destructiveDistroTest.{{matrix.PACKAGING_TASK}} + timeout_in_minutes: 300 + matrix: + setup: + image: + - windows-2016 + - windows-2019 + - windows-2022 + PACKAGING_TASK: + - default-windows-archive + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-32-98304 + diskType: pd-ssd + diskSizeGb: 350 + env: + PACKAGING_TASK: "{{matrix.PACKAGING_TASK}}" diff --git a/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml b/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml new file mode 100644 index 0000000000000..c62cf23310422 --- /dev/null +++ b/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml @@ -0,0 +1,22 @@ +config: + allow-labels: ":Delivery/Packaging" +steps: + - group: packaging-tests-upgrade + steps: + - label: "{{matrix.image}} / $BWC_VERSION / packaging-tests-upgrade" + key: "packaging-tests-upgrade:$BWC_VERSION_SNAKE" + command: ./.ci/scripts/packaging-test.sh destructiveDistroUpgradeTest.v$BWC_VERSION + timeout_in_minutes: 300 + bwc_template: true + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: 
family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: $BWC_VERSION diff --git a/.buildkite/pipelines/pull-request/part-1-fips.yml b/.buildkite/pipelines/pull-request/part-1-fips.yml new file mode 100644 index 0000000000000..42f930c1bde9a --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-1-fips.yml @@ -0,0 +1,11 @@ +config: + allow-labels: "Team:Security" +steps: + - label: part-1-fips + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed -Dtests.fips.enabled=true checkPart1 + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/part-1-windows.yml b/.buildkite/pipelines/pull-request/part-1-windows.yml new file mode 100644 index 0000000000000..20d46ebaa7406 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-1-windows.yml @@ -0,0 +1,14 @@ +config: + allow-labels: "test-windows" +steps: + - label: part-1-windows + command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-windows-2022 + machineType: custom-32-98304 + diskType: pd-ssd + diskSizeGb: 350 + env: + GRADLE_TASK: checkPart1 diff --git a/.buildkite/pipelines/pull-request/part-1.yml b/.buildkite/pipelines/pull-request/part-1.yml new file mode 100644 index 0000000000000..3d467c6c41e43 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-1.yml @@ -0,0 +1,9 @@ +steps: + - label: part-1 + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart1 + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/part-2-fips.yml b/.buildkite/pipelines/pull-request/part-2-fips.yml new file mode 100644 index 
0000000000000..6a3647ceb50ae --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-2-fips.yml @@ -0,0 +1,11 @@ +config: + allow-labels: "Team:Security" +steps: + - label: part-2-fips + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed -Dtests.fips.enabled=true checkPart2 + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/part-2-windows.yml b/.buildkite/pipelines/pull-request/part-2-windows.yml new file mode 100644 index 0000000000000..f38df244e8389 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-2-windows.yml @@ -0,0 +1,14 @@ +config: + allow-labels: "test-windows" +steps: + - label: part-2-windows + command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-windows-2022 + machineType: custom-32-98304 + diskType: pd-ssd + diskSizeGb: 350 + env: + GRADLE_TASK: checkPart2 diff --git a/.buildkite/pipelines/pull-request/part-2.yml b/.buildkite/pipelines/pull-request/part-2.yml new file mode 100644 index 0000000000000..43de69bbcd945 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-2.yml @@ -0,0 +1,9 @@ +steps: + - label: part-2 + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart2 + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/part-3-fips.yml b/.buildkite/pipelines/pull-request/part-3-fips.yml new file mode 100644 index 0000000000000..cee3ea153acb9 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-3-fips.yml @@ -0,0 +1,11 @@ +config: + allow-labels: "Team:Security" +steps: + - label: part-3-fips + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed -Dtests.fips.enabled=true checkPart3 + 
timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/part-3-windows.yml b/.buildkite/pipelines/pull-request/part-3-windows.yml new file mode 100644 index 0000000000000..3bad740aedb72 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-3-windows.yml @@ -0,0 +1,14 @@ +config: + allow-labels: "test-windows" +steps: + - label: part-3-windows + command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-windows-2022 + machineType: custom-32-98304 + diskType: pd-ssd + diskSizeGb: 350 + env: + GRADLE_TASK: checkPart3 diff --git a/.buildkite/pipelines/pull-request/part-3.yml b/.buildkite/pipelines/pull-request/part-3.yml new file mode 100644 index 0000000000000..12abae7634822 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-3.yml @@ -0,0 +1,11 @@ +config: + skip-target-branches: "7.17" +steps: + - label: part-3 + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart3 + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/precommit.yml b/.buildkite/pipelines/pull-request/precommit.yml new file mode 100644 index 0000000000000..f6548dfeed9b2 --- /dev/null +++ b/.buildkite/pipelines/pull-request/precommit.yml @@ -0,0 +1,12 @@ +config: + allow-labels: ">test-mute" + skip-labels: [] +steps: + - label: precommit + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed precommit + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/pipelines/pull-request/release-tests.yml b/.buildkite/pipelines/pull-request/release-tests.yml new 
file mode 100644 index 0000000000000..7d7a5c77d3320 --- /dev/null +++ b/.buildkite/pipelines/pull-request/release-tests.yml @@ -0,0 +1,11 @@ +config: + allow-labels: test-release +steps: + - label: release-tests + command: .buildkite/scripts/release-tests.sh + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + diskSizeGb: 350 + machineType: custom-32-98304 diff --git a/.buildkite/pipelines/pull-request/rest-compatibility.yml b/.buildkite/pipelines/pull-request/rest-compatibility.yml new file mode 100644 index 0000000000000..a69810e23d960 --- /dev/null +++ b/.buildkite/pipelines/pull-request/rest-compatibility.yml @@ -0,0 +1,11 @@ +config: + skip-target-branches: "7.17" +steps: + - label: rest-compatibility + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkRestCompat + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/scripts/cloud-deploy.sh b/.buildkite/scripts/cloud-deploy.sh new file mode 100755 index 0000000000000..2b98aa224406b --- /dev/null +++ b/.buildkite/scripts/cloud-deploy.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -euo pipefail + +.ci/scripts/run-gradle.sh buildCloudDockerImage + +ES_VERSION=$(grep 'elasticsearch' build-tools-internal/version.properties | awk '{print $3}') +DOCKER_TAG="docker.elastic.co/elasticsearch-ci/elasticsearch-cloud:${ES_VERSION}-${BUILDKITE_COMMIT:0:7}" +docker tag elasticsearch-cloud:test "$DOCKER_TAG" + +echo "$DOCKER_REGISTRY_PASSWORD" | docker login -u "$DOCKER_REGISTRY_USERNAME" --password-stdin docker.elastic.co +unset DOCKER_REGISTRY_USERNAME DOCKER_REGISTRY_PASSWORD + +docker push "$DOCKER_TAG" diff --git a/.buildkite/scripts/get-legacy-secret.sh b/.buildkite/scripts/get-legacy-secret.sh new file mode 100755 index 0000000000000..3df6c27f484f2 --- /dev/null +++ b/.buildkite/scripts/get-legacy-secret.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -euo 
pipefail + +# WARNING: this script will echo the credentials to the console. It is meant to be called from another script and captured in a variable. +# It should really only be used inside .buildkite/hooks/pre-command + +source .buildkite/scripts/setup-legacy-vault.sh + +vault read -format=json "$1" diff --git a/.buildkite/scripts/pull-request/README.md b/.buildkite/scripts/pull-request/README.md new file mode 100644 index 0000000000000..5fc1d564dc74f --- /dev/null +++ b/.buildkite/scripts/pull-request/README.md @@ -0,0 +1,38 @@ +# Pull Request pipeline generator + +## Overview + +Each time a pull request build is triggered, such as via commit or comment, we use this generator to dynamically create the steps that are needed to run. + +The generator handles the following: + + - `allow-labels` - only trigger a step if the PR has one of these labels + - `skip-labels` - don't trigger the step if the PR has one of these labels + - `excluded-regions` - don't trigger the step if **all** of the changes in the PR match these paths/regexes + - `included-regions` - trigger the step if **all** of the changes in the PR match these paths/regexes + - `trigger-phrase` - trigger this step, and ignore all other steps, if the build was triggered by a comment and that comment matches this regex + - Note that each step has an automatic phrase of `.*run\\W+elasticsearch-ci/.*` + - Replacing `$SNAPSHOT_BWC_VERSIONS` in pipelines with an array of versions from `.ci/snapshotBwcVersions` + - Duplicating any step with `bwc_template: true` for each BWC version in `.ci/bwcVersions` + +[Bun](https://bun.sh/) is used to test and run the TypeScript. It's an alternative JavaScript runtime that natively handles TypeScript. + +### Pipelines Location + +Pipelines are in [`.buildkite/pipelines`](../../pipelines/pull-request). They are automatically picked up and given a name based on their filename. 
+ + +## Setup + +- [Install bun](https://bun.sh/) + - `npm install -g bun` will work if you already have `npm` +- `cd .buildkite; bun install` to install dependencies + +## Run tests + +```bash +cd .buildkite +bun test +``` + +If you need to regenerate the snapshots, run `bun test --update-snapshots`. diff --git a/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap b/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap new file mode 100644 index 0000000000000..39cd3fe07beb4 --- /dev/null +++ b/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap @@ -0,0 +1,185 @@ +// Bun Snapshot v1, https://goo.gl/fbAQLP + +exports[`generatePipelines should generate correct pipelines with a non-docs change 1`] = ` +[ + { + "steps": [ + { + "group": "bwc-snapshots", + "steps": [ + { + "agents": { + "buildDirectory": "/dev/shm/bk", + "image": "family/elasticsearch-ubuntu-2004", + "machineType": "custom-32-98304", + "provider": "gcp", + }, + "command": ".ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTest", + "env": { + "BWC_VERSION": "{{matrix.BWC_VERSION}}", + }, + "label": "{{matrix.BWC_VERSION}} / bwc-snapshots", + "matrix": { + "setup": { + "BWC_VERSION": [ + "7.17.14", + "8.10.3", + "8.11.0", + ], + }, + }, + "timeout_in_minutes": 300, + }, + ], + }, + ], + }, + { + "env": { + "CUSTOM_ENV_VAR": "value", + }, + "steps": [ + { + "command": "echo 'hello world'", + "label": "test-step", + }, + ], + }, +] +`; + +exports[`generatePipelines should generate correct pipelines with only docs changes 1`] = ` +[ + { + "steps": [ + { + "agents": { + "buildDirectory": "/dev/shm/bk", + "image": "family/elasticsearch-ubuntu-2004", + "machineType": "custom-32-98304", + "provider": "gcp", + }, + "command": ".ci/scripts/run-gradle.sh -Dignore.tests.seed precommit :docs:check", + "label": "docs-check", + "timeout_in_minutes": 300, + }, + ], + }, +] +`; + +exports[`generatePipelines should generate correct pipelines with full 
BWC expansion 1`] = ` +[ + { + "steps": [ + { + "group": "bwc", + "steps": [ + { + "agents": { + "buildDirectory": "/dev/shm/bk", + "image": "family/elasticsearch-ubuntu-2004", + "machineType": "custom-32-98304", + "provider": "gcp", + }, + "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.0.0#bwcTest", + "env": { + "BWC_VERSION": "7.0.0", + }, + "key": "full-bwc:7_0_0", + "label": "7.0.0 / bwc", + "timeout_in_minutes": 300, + }, + { + "agents": { + "buildDirectory": "/dev/shm/bk", + "image": "family/elasticsearch-ubuntu-2004", + "machineType": "custom-32-98304", + "provider": "gcp", + }, + "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.0.1#bwcTest", + "env": { + "BWC_VERSION": "7.0.1", + }, + "key": "full-bwc:7_0_1", + "label": "7.0.1 / bwc", + "timeout_in_minutes": 300, + }, + { + "agents": { + "buildDirectory": "/dev/shm/bk", + "image": "family/elasticsearch-ubuntu-2004", + "machineType": "custom-32-98304", + "provider": "gcp", + }, + "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.1.0#bwcTest", + "env": { + "BWC_VERSION": "7.1.0", + }, + "key": "full-bwc:7_1_0", + "label": "7.1.0 / bwc", + "timeout_in_minutes": 300, + }, + { + "agents": { + "buildDirectory": "/dev/shm/bk", + "image": "family/elasticsearch-ubuntu-2004", + "machineType": "custom-32-98304", + "provider": "gcp", + }, + "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.10.0#bwcTest", + "env": { + "BWC_VERSION": "8.10.0", + }, + "key": "full-bwc:8_10_0", + "label": "8.10.0 / bwc", + "timeout_in_minutes": 300, + }, + { + "agents": { + "buildDirectory": "/dev/shm/bk", + "image": "family/elasticsearch-ubuntu-2004", + "machineType": "custom-32-98304", + "provider": "gcp", + }, + "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.11.0#bwcTest", + "env": { + "BWC_VERSION": "8.11.0", + }, + "key": "full-bwc:8_11_0", + "label": "8.11.0 / bwc", + "timeout_in_minutes": 300, + }, + ], + }, + ], + }, + { + "env": { + 
"CUSTOM_ENV_VAR": "value", + }, + "steps": [ + { + "command": "echo 'hello world'", + "label": "test-step", + }, + ], + }, +] +`; + +exports[`generatePipelines should generate correct pipeline when using a trigger comment for it 1`] = ` +[ + { + "env": { + "CUSTOM_ENV_VAR": "value", + }, + "steps": [ + { + "command": "echo 'hello world'", + "label": "test-step", + }, + ], + }, +] +`; diff --git a/.buildkite/scripts/pull-request/bwc-versions.ts b/.buildkite/scripts/pull-request/bwc-versions.ts new file mode 100644 index 0000000000000..adbe92fad76f4 --- /dev/null +++ b/.buildkite/scripts/pull-request/bwc-versions.ts @@ -0,0 +1,30 @@ +import { parse } from "yaml"; +import { readFileSync } from "fs"; +import { resolve } from "path"; + +const PROJECT_ROOT = resolve(`${import.meta.dir}/../../..`); + +let BWC_VERSIONS_PATH = `${PROJECT_ROOT}/.ci/bwcVersions`; +let BWC_VERSIONS: any; + +let SNAPSHOT_BWC_VERSIONS_PATH = `${PROJECT_ROOT}/.ci/snapshotBwcVersions`; +let SNAPSHOT_BWC_VERSIONS: any; + +export const getSnapshotBwcVersions = () => { + SNAPSHOT_BWC_VERSIONS = SNAPSHOT_BWC_VERSIONS ?? parse(readFileSync(SNAPSHOT_BWC_VERSIONS_PATH, "utf-8")).BWC_VERSION; + + return SNAPSHOT_BWC_VERSIONS; +}; + +export const getBwcVersions = () => { + BWC_VERSIONS = BWC_VERSIONS ?? 
parse(readFileSync(BWC_VERSIONS_PATH, "utf-8")).BWC_VERSION; + return BWC_VERSIONS; +}; + +export const setSnapshotBwcVersionsPath = (path: string) => { + SNAPSHOT_BWC_VERSIONS_PATH = path; +}; + +export const setBwcVersionsPath = (path: string) => { + BWC_VERSIONS_PATH = path; +}; diff --git a/.buildkite/scripts/pull-request/mocks/bwcVersions b/.buildkite/scripts/pull-request/mocks/bwcVersions new file mode 100644 index 0000000000000..0f4382943d70b --- /dev/null +++ b/.buildkite/scripts/pull-request/mocks/bwcVersions @@ -0,0 +1,6 @@ +BWC_VERSION: + - "7.0.0" + - "7.0.1" + - "7.1.0" + - "8.10.0" + - "8.11.0" diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/.defaults.yml b/.buildkite/scripts/pull-request/mocks/pipelines/.defaults.yml new file mode 100644 index 0000000000000..b5341c16a7e97 --- /dev/null +++ b/.buildkite/scripts/pull-request/mocks/pipelines/.defaults.yml @@ -0,0 +1,5 @@ +config: + skip-labels: ">test-mute" + excluded-regions: + - ^docs/.* + - ^x-pack/docs/.* diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/bwc-snapshots.yml b/.buildkite/scripts/pull-request/mocks/pipelines/bwc-snapshots.yml new file mode 100644 index 0000000000000..0f549ed9f1195 --- /dev/null +++ b/.buildkite/scripts/pull-request/mocks/pipelines/bwc-snapshots.yml @@ -0,0 +1,21 @@ +config: + trigger-phrase: '.*run\W+elasticsearch-ci/bwc.*' + skip-labels: + - ">test-mute" + - "test-full-bwc" +steps: + - group: bwc-snapshots + steps: + - label: "{{matrix.BWC_VERSION}} / bwc-snapshots" + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTest + timeout_in_minutes: 300 + matrix: + setup: + BWC_VERSION: $SNAPSHOT_BWC_VERSIONS + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: "{{matrix.BWC_VERSION}}" diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/docs-check.yml b/.buildkite/scripts/pull-request/mocks/pipelines/docs-check.yml 
new file mode 100644 index 0000000000000..2201eb2d1e4ea --- /dev/null +++ b/.buildkite/scripts/pull-request/mocks/pipelines/docs-check.yml @@ -0,0 +1,14 @@ +config: + included-regions: + - ^docs/.* + - ^x-pack/docs/.* + excluded-regions: [] +steps: + - label: docs-check + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed precommit :docs:check + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/full-bwc.yml b/.buildkite/scripts/pull-request/mocks/pipelines/full-bwc.yml new file mode 100644 index 0000000000000..2737597815ad0 --- /dev/null +++ b/.buildkite/scripts/pull-request/mocks/pipelines/full-bwc.yml @@ -0,0 +1,17 @@ +config: + allow-labels: test-full-bwc +steps: + - group: bwc + steps: + - label: $BWC_VERSION / bwc + key: full-bwc:$BWC_VERSION_SNAKE + bwc_template: true + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$BWC_VERSION#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: $BWC_VERSION diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/using-defaults.yml b/.buildkite/scripts/pull-request/mocks/pipelines/using-defaults.yml new file mode 100644 index 0000000000000..a3b2010547a34 --- /dev/null +++ b/.buildkite/scripts/pull-request/mocks/pipelines/using-defaults.yml @@ -0,0 +1,5 @@ +env: + CUSTOM_ENV_VAR: "value" +steps: + - label: test-step + command: echo 'hello world' diff --git a/.buildkite/scripts/pull-request/mocks/snapshotBwcVersions b/.buildkite/scripts/pull-request/mocks/snapshotBwcVersions new file mode 100644 index 0000000000000..1bc1fa321d9da --- /dev/null +++ b/.buildkite/scripts/pull-request/mocks/snapshotBwcVersions @@ -0,0 +1,4 @@ +BWC_VERSION: + - "7.17.14" + - "8.10.3" + - "8.11.0" diff --git 
a/.buildkite/scripts/pull-request/pipeline.generate.ts b/.buildkite/scripts/pull-request/pipeline.generate.ts new file mode 100644 index 0000000000000..69caff990dcfe --- /dev/null +++ b/.buildkite/scripts/pull-request/pipeline.generate.ts @@ -0,0 +1,19 @@ +import { stringify } from "yaml"; +import { execSync } from "child_process"; + +import { generatePipelines } from "./pipeline"; + +const pipelines = generatePipelines(); + +for (const pipeline of pipelines) { + if (!process.env.CI) { + // Just for local debugging purposes + console.log(""); + console.log(stringify(pipeline)); + } else { + execSync(`buildkite-agent pipeline upload`, { + input: stringify(pipeline), + stdio: ["pipe", "inherit", "inherit"], + }); + } +} diff --git a/.buildkite/scripts/pull-request/pipeline.sh b/.buildkite/scripts/pull-request/pipeline.sh new file mode 100755 index 0000000000000..77bbc1e115430 --- /dev/null +++ b/.buildkite/scripts/pull-request/pipeline.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +set -euo pipefail + +npm install -g bun +bun .buildkite/scripts/pull-request/pipeline.generate.ts diff --git a/.buildkite/scripts/pull-request/pipeline.test.ts b/.buildkite/scripts/pull-request/pipeline.test.ts new file mode 100644 index 0000000000000..e13b1e1f73278 --- /dev/null +++ b/.buildkite/scripts/pull-request/pipeline.test.ts @@ -0,0 +1,38 @@ +import { beforeEach, describe, expect, test } from "bun:test"; + +import { generatePipelines } from "./pipeline"; +import { setBwcVersionsPath, setSnapshotBwcVersionsPath } from "./bwc-versions"; + +describe("generatePipelines", () => { + beforeEach(() => { + setBwcVersionsPath(`${import.meta.dir}/mocks/bwcVersions`); + setSnapshotBwcVersionsPath(`${import.meta.dir}/mocks/snapshotBwcVersions`); + + process.env["GITHUB_PR_LABELS"] = "test-label-1,test-label-2"; + process.env["GITHUB_PR_TRIGGER_COMMENT"] = ""; + }); + + test("should generate correct pipelines with a non-docs change", () => { + const pipelines = 
generatePipelines(`${import.meta.dir}/mocks/pipelines`, ["build.gradle", "docs/README.asciidoc"]); + expect(pipelines).toMatchSnapshot(); + }); + + test("should generate correct pipelines with only docs changes", () => { + const pipelines = generatePipelines(`${import.meta.dir}/mocks/pipelines`, ["docs/README.asciidoc"]); + expect(pipelines).toMatchSnapshot(); + }); + + test("should generate correct pipelines with full BWC expansion", () => { + process.env["GITHUB_PR_LABELS"] = "test-full-bwc"; + + const pipelines = generatePipelines(`${import.meta.dir}/mocks/pipelines`, ["build.gradle"]); + expect(pipelines).toMatchSnapshot(); + }); + + test("should generate correct pipeline when using a trigger comment for it", () => { + process.env["GITHUB_PR_TRIGGER_COMMENT"] = "run elasticsearch-ci/using-defaults"; + + const pipelines = generatePipelines(`${import.meta.dir}/mocks/pipelines`, ["build.gradle"]); + expect(pipelines).toMatchSnapshot(); + }); +}); diff --git a/.buildkite/scripts/pull-request/pipeline.ts b/.buildkite/scripts/pull-request/pipeline.ts new file mode 100644 index 0000000000000..c4e12f2aa48fe --- /dev/null +++ b/.buildkite/scripts/pull-request/pipeline.ts @@ -0,0 +1,162 @@ +import { parse } from "yaml"; +import { readFileSync, readdirSync } from "fs"; +import { basename, resolve } from "path"; +import { execSync } from "child_process"; + +import { BuildkitePipeline, BuildkiteStep, EsPipeline, EsPipelineConfig } from "./types"; +import { getBwcVersions, getSnapshotBwcVersions } from "./bwc-versions"; + +const PROJECT_ROOT = resolve(`${import.meta.dir}/../../..`); + +const getArray = (strOrArray: string | string[] | undefined): string[] => { + if (typeof strOrArray === "undefined") { + return []; + } + + return typeof strOrArray === "string" ? 
[strOrArray] : strOrArray; +}; + +const labelCheckAllow = (pipeline: EsPipeline, labels: string[]): boolean => { + if (pipeline.config?.["allow-labels"]) { + return getArray(pipeline.config["allow-labels"]).some((label) => labels.includes(label)); + } + return true; +}; + +const labelCheckSkip = (pipeline: EsPipeline, labels: string[]): boolean => { + if (pipeline.config?.["skip-labels"]) { + return !getArray(pipeline.config["skip-labels"]).some((label) => labels.includes(label)); + } + return true; +}; + +// Exclude the pipeline if all of the changed files in the PR are in at least one excluded region +const changedFilesExcludedCheck = (pipeline: EsPipeline, changedFiles: string[]): boolean => { + if (pipeline.config?.["excluded-regions"]) { + return !changedFiles.every((file) => + getArray(pipeline.config?.["excluded-regions"]).some((region) => file.match(region)) + ); + } + return true; +}; + +// Include the pipeline if all of the changed files in the PR are in at least one included region +const changedFilesIncludedCheck = (pipeline: EsPipeline, changedFiles: string[]): boolean => { + if (pipeline.config?.["included-regions"]) { + return changedFiles.every((file) => + getArray(pipeline.config?.["included-regions"]).some((region) => file.match(region)) + ); + } + return true; +}; + +const triggerCommentCheck = (pipeline: EsPipeline): boolean => { + if (process.env["GITHUB_PR_TRIGGER_COMMENT"] && pipeline.config?.["trigger-phrase"]) { + return !!process.env["GITHUB_PR_TRIGGER_COMMENT"].match(pipeline.config["trigger-phrase"]); + } + return false; +}; + +// There are so many BWC versions that we can't use the matrix feature in Buildkite, as it's limited to 20 elements per dimension +// So we need to duplicate the steps instead +// Recursively check for any steps that have a bwc_template attribute and expand them out into multiple steps, one for each BWC_VERSION +const doBwcTransforms = (step: BuildkitePipeline | BuildkiteStep) => { + const stepsToExpand = 
(step.steps || []).filter((s) => s.bwc_template); + step.steps = (step.steps || []).filter((s) => !s.bwc_template); + + for (const s of step.steps) { + if (s.steps?.length) { + doBwcTransforms(s); + } + } + + for (const stepToExpand of stepsToExpand) { + for (const bwcVersion of getBwcVersions()) { + let newStepJson = JSON.stringify(stepToExpand).replaceAll("$BWC_VERSION_SNAKE", bwcVersion.replaceAll(".", "_")); + newStepJson = newStepJson.replaceAll("$BWC_VERSION", bwcVersion); + const newStep = JSON.parse(newStepJson); + delete newStep.bwc_template; + step.steps.push(newStep); + } + } +}; + +export const generatePipelines = ( + directory: string = `${PROJECT_ROOT}/.buildkite/pipelines/pull-request`, + changedFiles: string[] = [] +) => { + let defaults: EsPipelineConfig = { config: {} }; + defaults = parse(readFileSync(`${directory}/.defaults.yml`, "utf-8")); + defaults.config = defaults.config || {}; + + let pipelines: EsPipeline[] = []; + const files = readdirSync(directory); + for (const file of files) { + if (!file.endsWith(".yml") || file.endsWith(".defaults.yml")) { + continue; + } + + let yaml = readFileSync(`${directory}/${file}`, "utf-8"); + yaml = yaml.replaceAll("$SNAPSHOT_BWC_VERSIONS", JSON.stringify(getSnapshotBwcVersions())); + const pipeline: EsPipeline = parse(yaml) || {}; + + pipeline.config = { ...defaults.config, ...(pipeline.config || {}) }; + + // '.../build-benchmark.yml' => 'build-benchmark' + const name = basename(file).split(".", 2)[0]; + pipeline.name = name; + pipeline.config["trigger-phrase"] = pipeline.config["trigger-phrase"] || `.*run\\W+elasticsearch-ci/${name}.*`; + + pipelines.push(pipeline); + } + + const labels = (process.env["GITHUB_PR_LABELS"] || "") + .split(",") + .map((x) => x.trim()) + .filter((x) => x); + + if (!changedFiles?.length) { + const mergeBase = execSync(`git merge-base ${process.env["GITHUB_PR_TARGET_BRANCH"]} HEAD`, { cwd: PROJECT_ROOT }) + .toString() + .trim(); + + const changedFilesOutput = execSync(`git 
diff --name-only ${mergeBase}`, { cwd: PROJECT_ROOT }).toString().trim(); + + changedFiles = changedFilesOutput + .split("\n") + .map((x) => x.trim()) + .filter((x) => x); + } + + let filters: ((pipeline: EsPipeline) => boolean)[] = [ + (pipeline) => labelCheckAllow(pipeline, labels), + (pipeline) => labelCheckSkip(pipeline, labels), + (pipeline) => changedFilesExcludedCheck(pipeline, changedFiles), + (pipeline) => changedFilesIncludedCheck(pipeline, changedFiles), + ]; + + // When triggering via comment, we ONLY want to run pipelines that match the trigger phrase, regardless of labels, etc + if (process.env["GITHUB_PR_TRIGGER_COMMENT"]) { + filters = [triggerCommentCheck]; + } + + for (const filter of filters) { + pipelines = pipelines.filter(filter); + } + + for (const pipeline of pipelines) { + doBwcTransforms(pipeline); + } + + pipelines.sort((a, b) => (a.name ?? "").localeCompare(b.name ?? "")); + + const finalPipelines = pipelines.map((pipeline) => { + const finalPipeline = { ...pipeline }; + delete finalPipeline.config; + delete finalPipeline.name; + + return finalPipeline; + }); + + return finalPipelines; +}; diff --git a/.buildkite/scripts/pull-request/types.ts b/.buildkite/scripts/pull-request/types.ts new file mode 100644 index 0000000000000..15140a03fb86a --- /dev/null +++ b/.buildkite/scripts/pull-request/types.ts @@ -0,0 +1,24 @@ +export type EsPipelineConfig = { + config?: { + "allow-labels"?: string | string[]; + "skip-labels"?: string | string[]; + "included-regions"?: string | string[]; + "excluded-regions"?: string | string[]; + "trigger-phrase"?: string; + }; +}; + +export type BuildkiteStep = { + steps?: BuildkiteStep[]; + group?: string; + bwc_template?: boolean; +}; + +export type BuildkitePipeline = { + steps?: BuildkiteStep[]; +}; + +export type EsPipeline = EsPipelineConfig & + BuildkitePipeline & { + name?: string; + }; diff --git a/.buildkite/scripts/lucene-snapshot/get-credentials.sh b/.buildkite/scripts/setup-legacy-vault.sh similarity 
index 61% rename from .buildkite/scripts/lucene-snapshot/get-credentials.sh rename to .buildkite/scripts/setup-legacy-vault.sh index 042c664384a7f..d84f2a94d5391 100755 --- a/.buildkite/scripts/lucene-snapshot/get-credentials.sh +++ b/.buildkite/scripts/setup-legacy-vault.sh @@ -2,9 +2,6 @@ set -euo pipefail -# WARNING: this script will echo the credentials to the console. It is meant to be called from another script and captured in a variable. -# It should really only be used inside .buildkite/hooks/pre-command - VAULT_ROLE_ID=$(vault read -field=role-id secret/ci/elastic-elasticsearch/legacy-vault-credentials) VAULT_SECRET_ID=$(vault read -field=secret-id secret/ci/elastic-elasticsearch/legacy-vault-credentials) VAULT_ADDR=https://secrets.elastic.co:8200 @@ -12,5 +9,3 @@ VAULT_ADDR=https://secrets.elastic.co:8200 unset VAULT_TOKEN VAULT_TOKEN=$(vault write -field=token auth/approle/login role_id=$VAULT_ROLE_ID secret_id=$VAULT_SECRET_ID) export VAULT_TOKEN - -vault read -format=json aws-elastic/creds/lucene-snapshots diff --git a/.buildkite/scripts/third-party-test-credentials.gcs.sh b/.buildkite/scripts/third-party-test-credentials.gcs.sh new file mode 100755 index 0000000000000..fd1b435ed484b --- /dev/null +++ b/.buildkite/scripts/third-party-test-credentials.gcs.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +set -euo pipefail + +# Usage: .buildkite/scripts/third-party-test-credentials.gcs.sh + +source .buildkite/scripts/setup-legacy-vault.sh + +vault read -field=private_key_data gcp-elastic-ci-prod/key/elasticsearch-ci-thirdparty-gcs | base64 --decode > "$1" diff --git a/.buildkite/scripts/third-party-test-credentials.sh b/.buildkite/scripts/third-party-test-credentials.sh new file mode 100755 index 0000000000000..c882d61cbade6 --- /dev/null +++ b/.buildkite/scripts/third-party-test-credentials.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +set -euo pipefail + +# You'll notice that most of the variables are exported twice with different names here +# The first/uppercase export is to 
ensure that Buildkite masks the values in the logs should they accidentally be output +# The second/lowercase export is what the tests expect/require + +if [[ "${USE_3RD_PARTY_AZURE_CREDENTIALS:-}" == "true" ]]; then + json=$(vault read -format=json secret/ci/elastic-elasticsearch/migrated/azure_thirdparty_test_creds) + + AZURE_STORAGE_ACCOUNT_SECRET=$(echo "$json" | jq -r .data.account_id) + export AZURE_STORAGE_ACCOUNT_SECRET + export azure_storage_account="$AZURE_STORAGE_ACCOUNT_SECRET" + + AZURE_STORAGE_KEY=$(echo "$json" | jq -r .data.account_key) + export AZURE_STORAGE_KEY + export azure_storage_key="$AZURE_STORAGE_KEY" +fi + +if [[ "${USE_3RD_PARTY_AZURE_SAS_CREDENTIALS:-}" == "true" ]]; then + json=$(vault read -format=json secret/ci/elastic-elasticsearch/migrated/azure_thirdparty_sas_test_creds) + + AZURE_STORAGE_ACCOUNT_SECRET=$(echo "$json" | jq -r .data.account_id) + export AZURE_STORAGE_ACCOUNT_SECRET + export azure_storage_account="$AZURE_STORAGE_ACCOUNT_SECRET" + + AZURE_STORAGE_SAS_TOKEN=$(echo "$json" | jq -r .data.account_sas_token) + export AZURE_STORAGE_SAS_TOKEN + export azure_storage_sas_token="$AZURE_STORAGE_SAS_TOKEN" +fi + +if [[ "${USE_3RD_PARTY_S3_CREDENTIALS:-}" == "true" ]]; then + json=$(.buildkite/scripts/get-legacy-secret.sh aws-test/creds/elasticsearch-ci-s3) + AMAZON_S3_ACCESS_KEY=$(echo "$json" | jq -r .data.access_key) + export AMAZON_S3_ACCESS_KEY + export amazon_s3_access_key="$AMAZON_S3_ACCESS_KEY" + + AMAZON_S3_SECRET_KEY=$(echo "$json" | jq -r .data.secret_key) + export AMAZON_S3_SECRET_KEY + export amazon_s3_secret_key="$AMAZON_S3_SECRET_KEY" +fi + +if [[ "${USE_3RD_PARTY_GCS_CREDENTIALS:-}" == "true" ]]; then + export google_storage_service_account=$(mktemp) + .buildkite/scripts/third-party-test-credentials.gcs.sh "$google_storage_service_account" +fi + + + +unset json diff --git a/.buildkite/tsconfig.json b/.buildkite/tsconfig.json new file mode 100644 index 0000000000000..1449bc3d931a8 --- /dev/null +++ 
b/.buildkite/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "lib": ["ESNext"], + "module": "esnext", + "target": "esnext", + "moduleResolution": "bundler", + "moduleDetection": "force", + "allowImportingTsExtensions": true, + "noEmit": true, + "composite": true, + "strict": true, + "downlevelIteration": true, + "skipLibCheck": true, + "jsx": "preserve", + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "allowJs": true, + "types": [ + "bun-types" // add Bun global + ] + } +} diff --git a/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-unix.yml b/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-unix.yml index 48a537c33b612..7d0d724d6bbc4 100644 --- a/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-unix.yml +++ b/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-unix.yml @@ -31,9 +31,9 @@ - almalinux-8-packaging builders: - inject: - properties-file: '.ci/java-versions.properties' + properties-file: ".ci/java-versions.properties" properties-content: | JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA - shell: | #!/usr/local/bin/runbld --redirect-stderr - ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ destructivePackagingTest + ./.ci/scripts/packaging-test.sh destructivePackagingTest diff --git a/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-upgrade.yml b/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-upgrade.yml index 291ae9da4cd75..134deae255cd5 100644 --- a/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-upgrade.yml +++ b/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-upgrade.yml @@ -22,10 +22,10 @@ name: BWC_VERSION builders: - inject: - properties-file: '.ci/java-versions.properties' + properties-file: ".ci/java-versions.properties" properties-content: | JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA JAVA16_HOME=$HOME/.java/openjdk16 - shell: | #!/usr/local/bin/runbld 
--redirect-stderr - ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v$BWC_VERSION + ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v$BWC_VERSION diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml index 04e48036a8e9e..2d4f372142512 100644 --- a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml +++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml @@ -25,9 +25,9 @@ - ^docs/.* - ^x-pack/docs/.* black-list-labels: - - '>test-mute' - - ':Delivery/Packaging' - - 'buildkite-opt-in' + - ">test-mute" + - ":Delivery/Packaging" + - "buildkite-opt-in" axes: - axis: type: label-expression @@ -39,14 +39,14 @@ type: user-defined name: PACKAGING_TASK values: - - 'destructiveDistroTest.docker' - - 'destructiveDistroTest.packages' - - 'destructiveDistroTest.archives' + - "destructiveDistroTest.docker" + - "destructiveDistroTest.packages" + - "destructiveDistroTest.archives" builders: - inject: - properties-file: '.ci/java-versions.properties' + properties-file: ".ci/java-versions.properties" properties-content: | JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA - shell: | #!/usr/local/bin/runbld --redirect-stderr - ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ $PACKAGING_TASK + ./.ci/scripts/packaging-test.sh $PACKAGING_TASK diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml index a7413699ff6c3..af1d3f493eeb0 100644 --- a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml +++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml 
@@ -25,10 +25,10 @@ - ^docs/.* - ^x-pack/docs/.* white-list-labels: - - ':Delivery/Packaging' + - ":Delivery/Packaging" black-list-labels: - - '>test-mute' - - 'buildkite-opt-in' + - ">test-mute" + - "buildkite-opt-in" axes: - axis: type: label-expression @@ -54,14 +54,14 @@ type: user-defined name: PACKAGING_TASK values: - - 'destructiveDistroTest.docker' - - 'destructiveDistroTest.packages' - - 'destructiveDistroTest.archives' + - "destructiveDistroTest.docker" + - "destructiveDistroTest.packages" + - "destructiveDistroTest.archives" builders: - inject: - properties-file: '.ci/java-versions.properties' + properties-file: ".ci/java-versions.properties" properties-content: | JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA - shell: | #!/usr/local/bin/runbld --redirect-stderr - ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ $PACKAGING_TASK + ./.ci/scripts/packaging-test.sh $PACKAGING_TASK diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-upgrade-tests.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-upgrade-tests.yml index 2b73d0144cab7..19ed5398e3e1d 100644 --- a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-upgrade-tests.yml +++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-upgrade-tests.yml @@ -26,10 +26,10 @@ - ^docs/.* - ^x-pack/docs/.* white-list-labels: - - ':Delivery/Packaging' + - ":Delivery/Packaging" black-list-labels: - - '>test-mute' - - 'buildkite-opt-in' + - ">test-mute" + - "buildkite-opt-in" axes: - axis: type: label-expression @@ -43,7 +43,7 @@ name: "BWC_VERSION" builders: - inject: - properties-file: '.ci/java-versions.properties' + properties-file: ".ci/java-versions.properties" properties-content: | JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA JAVA8_HOME=$HOME/.java/java8 @@ -51,4 +51,4 @@ JAVA16_HOME=$HOME/.java/openjdk16 - shell: | #!/usr/local/bin/runbld --redirect-stderr - ./.ci/scripts/packaging-test.sh --build-cache 
-Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ destructiveDistroUpgradeTest.v$BWC_VERSION + ./.ci/scripts/packaging-test.sh destructiveDistroUpgradeTest.v$BWC_VERSION diff --git a/.ci/scripts/packaging-test.sh b/.ci/scripts/packaging-test.sh index afe162b23e564..1626255c30b4f 100755 --- a/.ci/scripts/packaging-test.sh +++ b/.ci/scripts/packaging-test.sh @@ -77,5 +77,5 @@ sudo -E env \ --unset=ES_JAVA_HOME \ --unset=JAVA_HOME \ SYSTEM_JAVA_HOME=`readlink -f -n $BUILD_JAVA_HOME` \ - ./gradlew -g $HOME/.gradle --scan --parallel --continue $@ + ./gradlew -g $HOME/.gradle --scan --parallel --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ --continue $@ diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 82c9416515d24..e129cdaa12469 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -229,7 +229,7 @@ private static Page page(String operation) { case "mv_min", "mv_min_ascending" -> { var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); if (operation.endsWith("ascending")) { - builder.mvOrdering(Block.MvOrdering.ASCENDING); + builder.mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } for (int i = 0; i < BLOCK_LENGTH; i++) { builder.beginPositionEntry(); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java index 7e13d1ec0354c..e9f2d20d84195 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java +++ 
b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java @@ -157,19 +157,19 @@ public void setup() { @Benchmark @OperationsPerInvocation(AggregatorBenchmark.BLOCK_LENGTH) - public Block adaptive() { - return MultivalueDedupe.dedupeToBlockAdaptive(block); + public void adaptive() { + MultivalueDedupe.dedupeToBlockAdaptive(Block.Ref.floating(block)).close(); } @Benchmark @OperationsPerInvocation(AggregatorBenchmark.BLOCK_LENGTH) - public Block copyAndSort() { - return MultivalueDedupe.dedupeToBlockUsingCopyAndSort(block); + public void copyAndSort() { + MultivalueDedupe.dedupeToBlockUsingCopyAndSort(Block.Ref.floating(block)).close(); } @Benchmark @OperationsPerInvocation(AggregatorBenchmark.BLOCK_LENGTH) - public Block copyMissing() { - return MultivalueDedupe.dedupeToBlockUsingCopyMissing(block); + public void copyMissing() { + MultivalueDedupe.dedupeToBlockUsingCopyMissing(Block.Ref.floating(block)).close(); } } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java index 9ef4eef2a6924..84f7cec47b737 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -107,6 +108,7 @@ private static Operator operator(String data, int topCount) { ClusterSettings.createBuiltInClusterSettings() ); return new TopNOperator( + BlockFactory.getNonBreakingInstance(), 
breakerService.getBreaker(CircuitBreaker.REQUEST), topCount, elementTypes, diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index 7f9e258abd65a..5711d66fd848f 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -106,7 +106,7 @@ buildScan { // Add SCM information def prId = System.getenv('BUILDKITE_PULL_REQUEST') if (prId != 'false') { - def prBaseUrl = (System.getenv('BUILDKITE_PULL_REQUEST_REPO') - ".git") + def prBaseUrl = (System.getenv('BUILDKITE_PULL_REQUEST_REPO') - ".git").replaceFirst("git://", "https://") value 'Git Commit ID', System.getenv('BUILDKITE_COMMIT') tag "pr/${prId}" tag 'pull-request' diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index cecc5c7806240..1bd11dc2313ba 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -184,6 +184,7 @@ def commonPackageConfig(String type, String architecture) { configurationFile '/etc/elasticsearch/users_roles' from("${packagingFiles}") { dirMode 02750 + setgid = true into('/etc') permissionGroup 'elasticsearch' includeEmptyDirs true @@ -194,6 +195,7 @@ def commonPackageConfig(String type, String architecture) { from("${packagingFiles}/etc/elasticsearch") { into('/etc/elasticsearch') dirMode 02750 + setgid = true fileMode 0660 permissionGroup 'elasticsearch' includeEmptyDirs true @@ -240,7 +242,8 @@ def commonPackageConfig(String type, String architecture) { createDirectoryEntry true user u permissionGroup g - dirMode mode + dirMode = mode + setgid = mode == 02750 } } copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', 02750) diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java 
b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java index 6d832dfff2758..28a086cf6f8f7 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java @@ -9,7 +9,6 @@ package org.elasticsearch.server.cli; import org.elasticsearch.Build; -import org.elasticsearch.Version; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.Strings; @@ -44,7 +43,7 @@ class APMJvmOptions { // tag::noformat private static final Map STATIC_CONFIG = Map.of( // Identifies the version of Elasticsearch in the captured trace data. - "service_version", Version.CURRENT.toString(), + "service_version", Build.current().version(), // Configures a log file to write to. `_AGENT_HOME_` is a placeholder used // by the agent. Don't disable writing to a log file, as the agent will then @@ -53,7 +52,8 @@ class APMJvmOptions { "log_file", "_AGENT_HOME_/../../logs/apm.log", // ES does not use auto-instrumentation. 
- "instrument", "false" + "instrument", "false", + "enable_experimental_instrumentations", "true" ); /** diff --git a/docs/changelog/100018.yaml b/docs/changelog/100018.yaml new file mode 100644 index 0000000000000..b39089db568c0 --- /dev/null +++ b/docs/changelog/100018.yaml @@ -0,0 +1,5 @@ +pr: 100018 +summary: Improve time-series error and documentation +area: "TSDB" +type: enhancement +issues: [] diff --git a/docs/changelog/100020.yaml b/docs/changelog/100020.yaml new file mode 100644 index 0000000000000..9f97778860eef --- /dev/null +++ b/docs/changelog/100020.yaml @@ -0,0 +1,6 @@ +pr: 100020 +summary: "[CI] `SearchResponseTests#testSerialization` failing resolved" +area: Search +type: bug +issues: + - 100005 diff --git a/docs/changelog/100064.yaml b/docs/changelog/100064.yaml new file mode 100644 index 0000000000000..f595b7e8e0705 --- /dev/null +++ b/docs/changelog/100064.yaml @@ -0,0 +1,5 @@ +pr: 100064 +summary: Update the elastic-apm-agent version +area: Infra/Core +type: enhancement +issues: [] diff --git a/docs/changelog/99584.yaml b/docs/changelog/99584.yaml new file mode 100644 index 0000000000000..229e3d8024506 --- /dev/null +++ b/docs/changelog/99584.yaml @@ -0,0 +1,5 @@ +pr: 99584 +summary: Adding an option for trained models to be platform specific +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/99604.yaml b/docs/changelog/99604.yaml new file mode 100644 index 0000000000000..7b473a056d608 --- /dev/null +++ b/docs/changelog/99604.yaml @@ -0,0 +1,5 @@ +pr: 99604 +summary: Show concrete error when enrich index not exist rather than NPE +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/99712.yaml b/docs/changelog/99712.yaml new file mode 100644 index 0000000000000..c5fa1ac1e64ec --- /dev/null +++ b/docs/changelog/99712.yaml @@ -0,0 +1,5 @@ +pr: 99712 +summary: Make downsample target index replicas configurable +area: Downsampling +type: bug +issues: [] diff --git a/docs/changelog/99832.yaml 
b/docs/changelog/99832.yaml new file mode 100644 index 0000000000000..9bd83591ba920 --- /dev/null +++ b/docs/changelog/99832.yaml @@ -0,0 +1,5 @@ +pr: 99832 +summary: APM Metering API +area: Infra/Core +type: enhancement +issues: [] diff --git a/docs/changelog/99912.yaml b/docs/changelog/99912.yaml new file mode 100644 index 0000000000000..06f0f9baa9661 --- /dev/null +++ b/docs/changelog/99912.yaml @@ -0,0 +1,6 @@ +pr: 99912 +summary: Represent histogram value count as long +area: Aggregations +type: enhancement +issues: + - 99820 diff --git a/docs/changelog/99938.yaml b/docs/changelog/99938.yaml new file mode 100644 index 0000000000000..4349b73516cae --- /dev/null +++ b/docs/changelog/99938.yaml @@ -0,0 +1,5 @@ +pr: 99938 +summary: "Prune unnecessary information from TransportNodesInfoAction.NodeInfoRequest" +area: Stats +type: enhancement +issues: [99744] diff --git a/docs/changelog/99947.yaml b/docs/changelog/99947.yaml new file mode 100644 index 0000000000000..61996c8fde92b --- /dev/null +++ b/docs/changelog/99947.yaml @@ -0,0 +1,5 @@ +pr: 99947 +summary: GET `_data_stream` displays both ILM and DSL information +area: Data streams +type: feature +issues: [] diff --git a/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc b/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc index 54638083b1053..d93df55118a8b 100644 --- a/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/time-series-aggregation.asciidoc @@ -67,8 +67,6 @@ PUT /my-time-series-index-0/_bulk -------------------------------------------------- // NOTCONSOLE -////////////////////////// - To perform a time series aggregation, specify "time_series" as the aggregation type. When the boolean "keyed" is true, each bucket is given a unique key. 
@@ -85,8 +83,6 @@ GET /_search -------------------------------------------------- // NOTCONSOLE -////////////////////////// - This will return all results in the time series, however a more typical query will use sub aggregations to reduce the date returned to something more relevant. diff --git a/docs/reference/data-streams/change-mappings-and-settings.asciidoc b/docs/reference/data-streams/change-mappings-and-settings.asciidoc index 461addf65c53c..3922ef018a713 100644 --- a/docs/reference/data-streams/change-mappings-and-settings.asciidoc +++ b/docs/reference/data-streams/change-mappings-and-settings.asciidoc @@ -573,15 +573,21 @@ stream's oldest backing index. "indices": [ { "index_name": ".ds-my-data-stream-2099.03.07-000001", <1> - "index_uuid": "Gpdiyq8sRuK9WuthvAdFbw" + "index_uuid": "Gpdiyq8sRuK9WuthvAdFbw", + "prefer_ilm": true, + "managed_by": "Unmanaged" }, { "index_name": ".ds-my-data-stream-2099.03.08-000002", - "index_uuid": "_eEfRrFHS9OyhqWntkgHAQ" + "index_uuid": "_eEfRrFHS9OyhqWntkgHAQ", + "prefer_ilm": true, + "managed_by": "Unmanaged" } ], "generation": 2, "status": "GREEN", + "next_generation_managed_by": "Unmanaged", + "prefer_ilm": true, "template": "my-data-stream-template", "hidden": false, "system": false, diff --git a/docs/reference/data-streams/downsampling-manual.asciidoc b/docs/reference/data-streams/downsampling-manual.asciidoc index 6b98816c2cf56..cc74e98b258de 100644 --- a/docs/reference/data-streams/downsampling-manual.asciidoc +++ b/docs/reference/data-streams/downsampling-manual.asciidoc @@ -358,11 +358,15 @@ This returns: "indices": [ { "index_name": ".ds-my-data-stream-2023.07.26-000001", <1> - "index_uuid": "ltOJGmqgTVm4T-Buoe7Acg" + "index_uuid": "ltOJGmqgTVm4T-Buoe7Acg", + "prefer_ilm": true, + "managed_by": "Data stream lifecycle" } ], "generation": 1, "status": "GREEN", + "next_generation_managed_by": "Data stream lifecycle", + "prefer_ilm": true, "template": "my-data-stream-template", "hidden": false, "system": false, diff 
--git a/docs/reference/esql/esql-functions.asciidoc b/docs/reference/esql/esql-functions.asciidoc index 9d656760fe12e..b921719fc097b 100644 --- a/docs/reference/esql/esql-functions.asciidoc +++ b/docs/reference/esql/esql-functions.asciidoc @@ -48,6 +48,7 @@ these functions: * <> * <> * <> +* <> * <> * <> * <> @@ -112,6 +113,7 @@ include::functions/mv_sum.asciidoc[] include::functions/now.asciidoc[] include::functions/pi.asciidoc[] include::functions/pow.asciidoc[] +include::functions/replace.asciidoc[] include::functions/right.asciidoc[] include::functions/round.asciidoc[] include::functions/rtrim.asciidoc[] diff --git a/docs/reference/esql/functions/replace.asciidoc b/docs/reference/esql/functions/replace.asciidoc new file mode 100644 index 0000000000000..554728d0fd205 --- /dev/null +++ b/docs/reference/esql/functions/replace.asciidoc @@ -0,0 +1,16 @@ +[[esql-replace]] +=== `REPLACE` +The function substitutes in the string (1st argument) any match of the regular expression (2nd argument) with the replacement string (3rd argument). + +If any of the arguments are `NULL`, the result is `NULL`. + +. 
This example replaces an occurrence of the word "World" with the word "Universe": + +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=replaceString] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=replaceString-result] +|=== diff --git a/docs/reference/esql/functions/signature/replace.svg b/docs/reference/esql/functions/signature/replace.svg new file mode 100644 index 0000000000000..7c86c00d019cb --- /dev/null +++ b/docs/reference/esql/functions/signature/replace.svg @@ -0,0 +1 @@ +REPLACE(arg1,arg2,arg3) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/mv_concat.asciidoc b/docs/reference/esql/functions/types/mv_concat.asciidoc index f1f744dbe4126..2836799f335e8 100644 --- a/docs/reference/esql/functions/types/mv_concat.asciidoc +++ b/docs/reference/esql/functions/types/mv_concat.asciidoc @@ -2,4 +2,7 @@ |=== arg1 | arg2 | result keyword | keyword | keyword +keyword | text | keyword +text | keyword | keyword +text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/replace.asciidoc b/docs/reference/esql/functions/types/replace.asciidoc new file mode 100644 index 0000000000000..3ef1fac79e4f6 --- /dev/null +++ b/docs/reference/esql/functions/types/replace.asciidoc @@ -0,0 +1,5 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +arg1 | arg2 | arg3 | result +keyword | keyword | keyword | keyword +|=== diff --git a/docs/reference/indices/get-data-stream.asciidoc b/docs/reference/indices/get-data-stream.asciidoc index ef2cf7eeee946..36998e7aa5fa3 100644 --- a/docs/reference/indices/get-data-stream.asciidoc +++ b/docs/reference/indices/get-data-stream.asciidoc @@ -225,7 +225,7 @@ cluster can not write into this data stream or change its mappings. `lifecycle`:: (object) -Functionality in preview:[]. Contains the configuration for the data stream lifecycle management of this data stream. 
+Contains the configuration for the data stream lifecycle management of this data stream. + .Properties of `lifecycle` [%collapsible%open] @@ -265,11 +265,17 @@ The API returns the following response: "indices": [ { "index_name": ".ds-my-data-stream-2099.03.07-000001", - "index_uuid": "xCEhwsp8Tey0-FLNFYVwSg" + "index_uuid": "xCEhwsp8Tey0-FLNFYVwSg", + "prefer_ilm": true, + "ilm_policy": "my-lifecycle-policy", + "managed_by": "Index Lifecycle Management" }, { "index_name": ".ds-my-data-stream-2099.03.08-000002", - "index_uuid": "PA_JquKGSiKcAKBA8DJ5gw" + "index_uuid": "PA_JquKGSiKcAKBA8DJ5gw", + "prefer_ilm": true, + "ilm_policy": "my-lifecycle-policy", + "managed_by": "Index Lifecycle Management" } ], "generation": 2, @@ -277,6 +283,8 @@ The API returns the following response: "my-meta-field": "foo" }, "status": "GREEN", + "next_generation_managed_by": "Index Lifecycle Management", + "prefer_ilm": true, "template": "my-index-template", "ilm_policy": "my-lifecycle-policy", "hidden": false, @@ -292,7 +300,10 @@ The API returns the following response: "indices": [ { "index_name": ".ds-my-data-stream-two-2099.03.08-000001", - "index_uuid": "3liBu2SYS5axasRt6fUIpA" + "index_uuid": "3liBu2SYS5axasRt6fUIpA", + "prefer_ilm": true, + "ilm_policy": "my-lifecycle-policy", + "managed_by": "Index Lifecycle Management" } ], "generation": 1, @@ -300,6 +311,8 @@ The API returns the following response: "my-meta-field": "foo" }, "status": "YELLOW", + "next_generation_managed_by": "Index Lifecycle Management", + "prefer_ilm": true, "template": "my-index-template", "ilm_policy": "my-lifecycle-policy", "hidden": false, diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc new file mode 100644 index 0000000000000..874bfa64d3551 --- /dev/null +++ b/docs/reference/inference/delete-inference.asciidoc @@ -0,0 +1,57 @@ +[role="xpack"] +[[delete-inference-api]] +=== Delete {infer} API + +Deletes an {infer} model deployment. 
+ + +[discrete] +[[delete-inference-api-request]] +==== {api-request-title} + +`DELETE /_inference//` + +[discrete] +[[delete-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `manage` <>. + + +[discrete] +[[delete-inference-api-path-params]] +==== {api-path-parms-title} + +:: +(Required, string) +The unique identifier of the {infer} model to delete. + +:: +(Required, string) +The type of {infer} task that the model performs. + + +[discrete] +[[delete-inference-api-example]] +==== {api-examples-title} + +The following API call deletes the `my-elser-model` {infer} model that can +perform `sparse_embedding` tasks. + + +[source,console] +------------------------------------------------------------ +DELETE /_inference/sparse_embedding/my-elser-model +------------------------------------------------------------ +// TEST[skip:TBD] + + +The API returns the following response: + +[source,console-result] +------------------------------------------------------------ +{ + "acknowledged": true +} +------------------------------------------------------------ +// NOTCONSOLE \ No newline at end of file diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc new file mode 100644 index 0000000000000..7e32bd05b5f56 --- /dev/null +++ b/docs/reference/inference/get-inference.asciidoc @@ -0,0 +1,79 @@ +[role="xpack"] +[[get-inference-api]] +=== Get {infer} API + +Retrieves {infer} model information. + +[discrete] +[[get-inference-api-request]] +==== {api-request-title} + +`GET /_inference/_all` + +`GET /_inference//_all` + +`GET /_inference//` + +[discrete] +[[get-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `manage` <>. 
+ +[discrete] +[[get-inference-api-desc]] +==== {api-description-title} + +You can get information in a single API request for: + +* a single {infer} model by providing the task type and the model ID, +* all of the {infer} models for a certain task type by providing the task type +and a wildcard expression, +* all of the {infer} models by using a wildcard expression. + + +[discrete] +[[get-inference-api-path-params]] +==== {api-path-parms-title} + +``:: +(Optional, string) +The unique identifier of the {infer} model. + + +``:: +(Optional, string) +The type of {infer} task that the model performs. + + +[discrete] +[[get-inference-api-example]] +==== {api-examples-title} + +The following API call retrives information about the `my-elser-model` {infer} +model that can perform `sparse_embedding` tasks. + + +[source,console] +------------------------------------------------------------ +GET _inference/sparse_embedding/my-elser-model +------------------------------------------------------------ +// TEST[skip:TBD] + + +The API returns the following response: + +[source,console-result] +------------------------------------------------------------ +{ + "model_id": "my-elser-model", + "task_type": "sparse_embedding", + "service": "elser_mlnode", + "service_settings": { + "num_allocations": 1, + "num_threads": 1 + }, + "task_settings": {} +} +------------------------------------------------------------ +// NOTCONSOLE \ No newline at end of file diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc new file mode 100644 index 0000000000000..ec1f01bc4d093 --- /dev/null +++ b/docs/reference/inference/inference-apis.asciidoc @@ -0,0 +1,16 @@ +[role="xpack"] +[[inference-apis]] +== {infer-cap} APIs + +You can use the following APIs to manage {infer} models and perform {infer}: + +* <> +* <> +* <> +* <> + + +include::delete-inference.asciidoc[] +include::get-inference.asciidoc[] +include::post-inference.asciidoc[] 
+include::put-inference.asciidoc[] \ No newline at end of file diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc new file mode 100644 index 0000000000000..99dd4a059519f --- /dev/null +++ b/docs/reference/inference/post-inference.asciidoc @@ -0,0 +1,97 @@ +[role="xpack"] +[[post-inference-api]] +=== Perform inference API + +Performs an inference task on an input text by using an {infer} model. + + +[discrete] +[[post-inference-api-request]] +==== {api-request-title} + +`POST /_inference//` + + +[discrete] +[[post-inference-api-prereqs]] +==== {api-prereq-title} + +* Requires the `manage` <>. + + +[discrete] +[[post-inference-api-desc]] +==== {api-description-title} + +The perform {infer} API enables you to use {infer} models to perform specific +tasks on data that you provide as an input. The API returns a response with the +resutls of the tasks. The {infer} model you use can perform one specific task +that has been defined when the model was created with the <>. + + +[discrete] +[[post-inference-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +The unique identifier of the {infer} model. + + +``:: +(Required, string) +The type of {infer} task that the model performs. + + +[discrete] +[[post-inference-api-request-body]] +== {api-request-body-title} + +`input`:: +(Required, string) +The text on which you want to perform the {infer} task. + + +[discrete] +[[post-inference-api-example]] +==== {api-examples-title} + +The following example performs sparse embedding on the example sentence. + + +[source,console] +------------------------------------------------------------ +POST _inference/sparse_embedding/my-elser-model +{ + "input": "The sky above the port was the color of television tuned to a dead channel." 
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+
+The API returns the following response:
+
+
+[source,console-result]
+------------------------------------------------------------
+{
+  "sparse_embedding": {
+    "port": 2.1259406,
+    "sky": 1.7073475,
+    "color": 1.6922266,
+    "dead": 1.6247464,
+    "television": 1.3525393,
+    "above": 1.2425821,
+    "tuned": 1.1440028,
+    "colors": 1.1218185,
+    "tv": 1.0111054,
+    "ports": 1.0067928,
+    "poem": 1.0042328,
+    "channel": 0.99471164,
+    "tune": 0.96235967,
+    "scene": 0.9020516,
+    (...)
+  }
+}
+------------------------------------------------------------
+// NOTCONSOLE
\ No newline at end of file
diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc
new file mode 100644
index 0000000000000..c5ccd6a57a8dd
--- /dev/null
+++ b/docs/reference/inference/put-inference.asciidoc
@@ -0,0 +1,104 @@
+[role="xpack"]
+[[put-inference-api]]
+=== Create {infer} API
+
+Creates a model to perform an {infer} task.
+
+
+[discrete]
+[[put-inference-api-request]]
+==== {api-request-title}
+
+`PUT /_inference//`
+
+
+[discrete]
+[[put-inference-api-prereqs]]
+==== {api-prereq-title}
+
+* Requires the `manage` <>.
+
+[discrete]
+[[put-inference-api-desc]]
+==== {api-description-title}
+
+The create {infer} API enables you to create and configure an {infer} model to
+perform a specific {infer} task.
+
+
+[discrete]
+[[put-inference-api-path-params]]
+==== {api-path-parms-title}
+
+
+``::
+(Required, string)
+The unique identifier of the model.
+
+``::
+(Required, string)
+The type of the {infer} task that the model will perform. Available task types:
+* `sparse_embedding`,
+* `text_embedding`.
+
+
+[discrete]
+[[put-inference-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type.
+Available services:
+* `elser`,
+* `elser_mlnode`.
+ +`service_settings`:: +(Required, object) +Settings used to install the {infer} model. These settings are specific to the +`service` you specified. + +`task_settings`:: +(Optional, object) +Settings to configure the {infer} task. These settings are specific to the +`` you specified. + + +[discrete] +[[put-inference-api-example]] +==== {api-examples-title} + +The following example shows how to create an {infer} model called +`my-elser-model` to perform a `sparse_embedding` task type. + +[source,console] +------------------------------------------------------------ +PUT _inference/sparse_embedding/my-elser-model +{ + "service": "elser_mlnode", + "service_settings": { + "num_allocations": 1, + "num_threads": 1 + }, + "task_settings": {} +} +------------------------------------------------------------ +// TEST[skip:TBD] + + +Example response: + +[source,console-result] +------------------------------------------------------------ +{ + "model_id": "my-elser-model", + "task_type": "sparse_embedding", + "service": "elser_mlnode", + "service_settings": { + "num_allocations": 1, + "num_threads": 1 + }, + "task_settings": {} +} +------------------------------------------------------------ +// NOTCONSOLE diff --git a/docs/reference/mapping/types/histogram.asciidoc b/docs/reference/mapping/types/histogram.asciidoc index 70164dba236ce..38887cef013b9 100644 --- a/docs/reference/mapping/types/histogram.asciidoc +++ b/docs/reference/mapping/types/histogram.asciidoc @@ -10,7 +10,7 @@ This data is defined using two paired arrays: * A `values` array of <> numbers, representing the buckets for the histogram. These values must be provided in ascending order. -* A corresponding `counts` array of <> numbers, representing how +* A corresponding `counts` array of <> numbers, representing how many values fall into each bucket. These numbers must be positive or zero. 
Because the elements in the `values` array correspond to the elements in the @@ -138,5 +138,5 @@ PUT my-index-000001/_doc/2 <1> Values for each bucket. Values in the array are treated as doubles and must be given in increasing order. For <> histograms this value represents the mean value. In case of HDR histograms this represents the value iterated to. -<2> Count for each bucket. Values in the arrays are treated as integers and must be positive or zero. +<2> Count for each bucket. Values in the arrays are treated as long integers and must be positive or zero. Negative values will be rejected. The relation between a bucket and a count is given by the position in the array. diff --git a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc index 82fd1872e6a76..7da46e13a8ce4 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc @@ -3,7 +3,9 @@ = Create trained models API [subs="attributes"] ++++ + Create trained models + ++++ Creates a trained model. @@ -1645,6 +1647,16 @@ Appropriate types are: * `pytorch`: The stored definition is a PyTorch (specifically a TorchScript) model. Currently only NLP models are supported. For more information, refer to {ml-docs}/ml-nlp.html[{nlp-cap}]. -- +`platform_architecture`:: +(Optional, string) +If the model only works on one platform, because it is heavily +optimized for a particular processor architecture and OS combination, +then this field specifies which. The format of the string must match +the platform identifiers used by Elasticsearch, so one of, `linux-x86_64`, +`linux-aarch64`, `darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. +For portable models (those that work independent of processor architecture or +OS features), leave this field unset. 
+ `tags`:: (Optional, string) diff --git a/docs/reference/modules/network/threading.asciidoc b/docs/reference/modules/network/threading.asciidoc index 87e7e2371472b..abf00b521b5cc 100644 --- a/docs/reference/modules/network/threading.asciidoc +++ b/docs/reference/modules/network/threading.asciidoc @@ -107,3 +107,12 @@ However, this API itself sends network messages so may not work correctly if the `transport_worker` threads are too busy. It is more reliable to use `jstack` to obtain stack dumps or use Java Flight Recorder to obtain a profiling trace. These tools are independent of any work the JVM is performing. + +It may also be possible to identify some reasons for delays from the server +logs, particularly looking at warnings from +`org.elasticsearch.transport.InboundHandler` and +`org.elasticsearch.transport.OutboundHandler`. Warnings about long processing +times from the `InboundHandler` are particularly indicative of incorrect +threading behaviour, whereas the transmission time reported by the +`OutboundHandler` includes time spent waiting for network congestion and the +`transport_worker` thread is free to do other work during this time. diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 8e1023c47b929..4ec8c203bbef9 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -1932,3 +1932,73 @@ Refer to <>. === Configure roles and users for remote clusters Refer to <>. 
+ +[role="exclude",id="ingest-pipeline-search"] +=== Ingest pipelines for Search indices + +coming::[8.11.0] + +[role="exclude",id="ingest-pipeline-search-inference"] +=== Inference processing for Search indices + +coming::[8.11.0] + +[id="ingest-pipeline-search-inference-update-mapping"] +==== Update mapping + +coming::[8.11.0] + +[role="exclude",id="nlp-example"] +=== Tutorial: Natural language processing (NLP) + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-overview"] +=== Elastic Behavioral Analytics + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-start"] +=== Get started with Behavioral Analytics + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-api"] +=== Behavioral Analytics APIs + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-event"] +=== View Behavioral Analytics Events + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-event-reference"] +=== Behavioral Analytics events reference + +coming::[8.11.0] + +[role="exclude",id="behavioral-analytics-cors"] +=== Set up CORS for Behavioral Analytics + +coming::[8.11.0] + +[role="exclude",id="search-application-overview"] +=== Elastic Search Applications + +coming::[8.11.0] + +[role="exclude",id="search-application-api"] +=== Search Applications search API and templates + +coming::[8.11.0] + +[role="exclude",id="search-application-client"] +=== Search Applications client + +coming::[8.11.0] + +[role="exclude",id="search-application-security"] +=== Search Applications security + +coming::[8.11.0] diff --git a/docs/reference/release-notes/8.9.2.asciidoc b/docs/reference/release-notes/8.9.2.asciidoc index d4244eab27645..6b00405261daf 100644 --- a/docs/reference/release-notes/8.9.2.asciidoc +++ b/docs/reference/release-notes/8.9.2.asciidoc @@ -3,6 +3,25 @@ Also see <>. +[float] +[[security-updates-8.9.2]] +=== Security updates + +* {es} generally filters out sensitive information and credentials before +logging to the audit log. 
It was found that this filtering was not applied when +requests to {es} use certain deprecated `_xpack/security` URIs for APIs. The +impact of this flaw is that sensitive information, such as passwords and tokens, +might be printed in cleartext in {es} audit logs. Note that audit logging is +disabled by default and needs to be explicitly enabled. Even when audit logging +is enabled, request bodies that could contain sensitive information are not +printed to the audit log unless explicitly configured. ++ +The issue is resolved in {es} 8.9.2. ++ +For more information, see our related +https://discuss.elastic.co/t/elasticsearch-8-9-2-and-7-17-13-security-update/342479[security +announcement]. + [[bug-8.9.2]] [float] === Bug fixes diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 1da39333db43e..b8ad9d9a0736e 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -28,8 +28,9 @@ not be included yet. * <> * <> * <> -* <> +* <> * <> +* <> * <> * <> * <> @@ -74,8 +75,9 @@ include::{es-repo-dir}/text-structure/apis/find-structure.asciidoc[leveloffset=+ include::{es-repo-dir}/graph/explore.asciidoc[] include::{es-repo-dir}/indices.asciidoc[] include::{es-repo-dir}/ilm/apis/ilm-api.asciidoc[] -include::{es-repo-dir}/ingest/apis/index.asciidoc[] +include::{es-repo-dir}/inference/inference-apis.asciidoc[] include::info.asciidoc[] +include::{es-repo-dir}/ingest/apis/index.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] include::{es-repo-dir}/rest-api/logstash/index.asciidoc[] include::{es-repo-dir}/ml/common/apis/index.asciidoc[] diff --git a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc index 0f07f1f4128fe..082bb2ae2e020 100644 --- a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc @@ 
-14,7 +14,7 @@ The instructions in this tutorial shows you how to use ELSER to perform semantic search on your data. NOTE: Only the first 512 extracted tokens per field are considered during -semantic search with ELSER v1. Refer to +semantic search with ELSER. Refer to {ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[this page] for more information. @@ -44,15 +44,16 @@ you must provide suitably sized nodes yourself. First, the mapping of the destination index - the index that contains the tokens that the model created based on your text - must be created. The destination -index must have a field with the <> field type -to index the ELSER output. +index must have a field with the +<> field type to index the +ELSER output. -NOTE: ELSER output must be ingested into a field with the `rank_features` field -type. Otherwise, {es} interprets the token-weight pairs as a massive amount of -fields in a document. If you get an error similar to this +NOTE: ELSER output must be ingested into a field with the `sparse_vector` or +`rank_features` field type. Otherwise, {es} interprets the token-weight pairs as +a massive amount of fields in a document. If you get an error similar to this `"Limit of total fields [1000] has been exceeded while adding new fields"` then the ELSER output field is not mapped properly and it has a field type different -than `rank_features`. +than `sparse_vector` or `rank_features`. [source,console] ---- @@ -61,7 +62,7 @@ PUT my-index "mappings": { "properties": { "ml.tokens": { <1> - "type": "rank_features" <2> + "type": "sparse_vector" <2> }, "text": { <3> "type": "text" <4> @@ -72,7 +73,7 @@ PUT my-index ---- // TEST[skip:TBD] <1> The name of the field to contain the generated tokens. -<2> The field to contain the tokens is a `rank_features` field. +<2> The field to contain the tokens is a `sparse_vector` field. <3> The name of the field from which to create the sparse vector representation. In this example, the name of the field is `text`. 
<4> The field type which is text in this example. @@ -90,12 +91,12 @@ that is being ingested in the pipeline. [source,console] ---- -PUT _ingest/pipeline/elser-v1-test +PUT _ingest/pipeline/elser-v2-test { "processors": [ { "inference": { - "model_id": ".elser_model_1", + "model_id": ".elser_model_2", "target_field": "ml", "field_map": { <1> "text": "text_field" @@ -155,7 +156,7 @@ POST _reindex?wait_for_completion=false }, "dest": { "index": "my-index", - "pipeline": "elser-v1-test" + "pipeline": "elser-v2-test" } } ---- @@ -192,7 +193,7 @@ GET my-index/_search "query":{ "text_expansion":{ "ml.tokens":{ - "model_id":".elser_model_1", + "model_id":".elser_model_2", "model_text":"How to avoid muscle soreness after running?" } } @@ -236,7 +237,7 @@ weights. "exercises":0.36694175, (...) }, - "model_id":".elser_model_1" + "model_id":".elser_model_2" } } }, @@ -276,7 +277,7 @@ GET my-index/_search "text_expansion": { "ml.tokens": { "model_text": "How to avoid muscle soreness after running?", - "model_id": ".elser_model_1", + "model_id": ".elser_model_2", "boost": 1 <2> } } @@ -342,7 +343,7 @@ PUT my-index }, "properties": { "ml.tokens": { - "type": "rank_features" + "type": "sparse_vector" }, "text": { "type": "text" @@ -359,7 +360,7 @@ PUT my-index ==== Further reading * {ml-docs}/ml-nlp-elser.html[How to download and deploy ELSER] -* {ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[ELSER v1 limitation] +* {ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[ELSER limitation] * https://www.elastic.co/blog/may-2023-launch-information-retrieval-elasticsearch-ai-model[Improving information retrieval in the Elastic Stack: Introducing Elastic Learned Sparse Encoder, our new retrieval model] [discrete] diff --git a/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc b/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc index 228b7a9202341..0228078e8ce39 100644 --- 
a/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/field-mappings.asciidoc @@ -1,15 +1,15 @@ // tag::elser[] ELSER produces token-weight pairs as output from the input text and the query. -The {es} <> field type can store these +The {es} <> field type can store these token-weight pairs as numeric feature vectors. The index must have a field with -the `rank_features` field type to index the tokens that ELSER generates. +the `sparse_vector` field type to index the tokens that ELSER generates. To create a mapping for your ELSER index, refer to the <> of the tutorial. The example shows how to create an index mapping for `my-index` that defines the `my_embeddings.tokens` field - which will contain the ELSER output - as a -`rank_features` field. +`sparse_vector` field. [source,console] ---- @@ -18,7 +18,7 @@ PUT my-index "mappings": { "properties": { "my_embeddings.tokens": { <1> - "type": "rank_features" <2> + "type": "sparse_vector" <2> }, "my_text_field": { <3> "type": "text" <4> @@ -28,7 +28,7 @@ PUT my-index } ---- <1> The name of the field that will contain the tokens generated by ELSER. -<2> The field that contains the tokens must be a `rank_features` field. +<2> The field that contains the tokens must be a `sparse_vector` field. <3> The name of the field from which to create the sparse vector representation. In this example, the name of the field is `my_text_field`. <4> The field type is `text` in this example. 
diff --git a/docs/reference/tab-widgets/semantic-search/generate-embeddings.asciidoc b/docs/reference/tab-widgets/semantic-search/generate-embeddings.asciidoc index 0adfda5c2bff9..786f40fe141bd 100644 --- a/docs/reference/tab-widgets/semantic-search/generate-embeddings.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/generate-embeddings.asciidoc @@ -21,7 +21,7 @@ PUT _ingest/pipeline/my-text-embeddings-pipeline "processors": [ { "inference": { - "model_id": ".elser_model_1", + "model_id": ".elser_model_2", "target_field": "my_embeddings", "field_map": { <1> "my_text_field": "text_field" diff --git a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc index 26fc25c2385c8..a99bdf3c8722b 100644 --- a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc @@ -22,7 +22,7 @@ GET my-index/_search "query": { "text_expansion": { "my_embeddings.tokens": { - "model_id": ".elser_model_1", + "model_id": ".elser_model_2", "model_text": "the query string" } } diff --git a/docs/reference/tab-widgets/semantic-search/search.asciidoc b/docs/reference/tab-widgets/semantic-search/search.asciidoc index 425b797789270..d1cd31fbe4309 100644 --- a/docs/reference/tab-widgets/semantic-search/search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/search.asciidoc @@ -12,7 +12,7 @@ GET my-index/_search "query":{ "text_expansion":{ "my_embeddings.tokens":{ <1> - "model_id":".elser_model_1", + "model_id":".elser_model_2", "model_text":"the query string" } } @@ -20,7 +20,7 @@ GET my-index/_search } ---- // TEST[skip:TBD] -<1> The field of type `rank_features`. +<1> The field of type `sparse_vector`. 
// end::elser[] diff --git a/docs/reference/troubleshooting/network-timeouts.asciidoc b/docs/reference/troubleshooting/network-timeouts.asciidoc index c15c5ee0d58a5..ab60eeff1b1a9 100644 --- a/docs/reference/troubleshooting/network-timeouts.asciidoc +++ b/docs/reference/troubleshooting/network-timeouts.asciidoc @@ -47,5 +47,7 @@ since it doesn't require any JVM threads. The threads involved in discovery and cluster membership are mainly `transport_worker` and `cluster_coordination` threads, for which there should never be a long wait. There may also be evidence of long waits for threads in -the {es} logs. See <> for more information. +the {es} logs, particularly looking at warning logs from +`org.elasticsearch.transport.InboundHandler`. See +<> for more information. end::troubleshooting-network-timeouts-threads[] diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index 97cefe4df8f67..4de1dc680064a 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -15,7 +15,7 @@ bytebuddy = "net.bytebuddy:byte-buddy:1.12.10" checkstyle = "com.puppycrawl.tools:checkstyle:10.3" commons-codec = "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" -docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.16.12" +docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5" forbiddenApis = "de.thetaphi:forbiddenapis:3.5.1" hamcrest = "org.hamcrest:hamcrest:2.1" httpcore = "org.apache.httpcomponents:httpcore:4.4.12" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 556da14241dcd..7209c4478d159 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -69,11 +69,11 @@ - - - - - + + + + + @@ -109,9 +109,9 @@ - - - + + + diff --git a/libs/core/src/main/java/org/elasticsearch/core/Releasables.java b/libs/core/src/main/java/org/elasticsearch/core/Releasables.java index b8d1a9a542779..c2b48c4706573 100644 --- 
a/libs/core/src/main/java/org/elasticsearch/core/Releasables.java +++ b/libs/core/src/main/java/org/elasticsearch/core/Releasables.java @@ -89,7 +89,7 @@ private static void close(boolean success, Releasable... releasables) { * // the resources will be released when reaching here * */ - public static Releasable wrap(final Iterable releasables) { + public static Releasable wrap(final Iterable releasables) { return new Releasable() { @Override public void close() { diff --git a/modules/apm/build.gradle b/modules/apm/build.gradle index c8619c97d1068..c9002a71bf746 100644 --- a/modules/apm/build.gradle +++ b/modules/apm/build.gradle @@ -18,7 +18,7 @@ dependencies { implementation "io.opentelemetry:opentelemetry-api:${otelVersion}" implementation "io.opentelemetry:opentelemetry-context:${otelVersion}" implementation "io.opentelemetry:opentelemetry-semconv:${otelVersion}-alpha" - runtimeOnly "co.elastic.apm:elastic-apm-agent:1.36.0" + runtimeOnly "co.elastic.apm:elastic-apm-agent:1.43.0" } tasks.named("dependencyLicenses").configure { diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java index be59eda4a63c2..935c4958ba3d7 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java @@ -27,6 +27,7 @@ import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.apm.internal.APMAgentSettings; import org.elasticsearch.telemetry.apm.internal.APMTelemetryProvider; +import org.elasticsearch.telemetry.apm.internal.metrics.APMMeter; import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; @@ -97,13 +98,16 @@ public Collection createComponents( apmAgentSettings.syncAgentSystemProperties(settings); apmAgentSettings.addClusterSettingsListeners(clusterService, 
telemetryProvider.get()); - return List.of(apmTracer); + final APMMeter apmMeter = telemetryProvider.get().getMeter(); + + return List.of(apmTracer, apmMeter); } @Override public List> getSettings() { return List.of( APMAgentSettings.APM_ENABLED_SETTING, + APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING, APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING, APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING, APMAgentSettings.APM_TRACING_SANITIZE_FIELD_NAMES, diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index 75ca94bb13ad6..e4a194ebe0172 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.telemetry.apm.internal.metrics.APMMeter; import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; import java.security.AccessController; @@ -40,14 +41,24 @@ public class APMAgentSettings { * Sensible defaults that Elasticsearch configures. This cannot be done via the APM agent * config file, as then their values could not be overridden dynamically via system properties. 
*/ - static Map APM_AGENT_DEFAULT_SETTINGS = Map.of("transaction_sample_rate", "0.2"); + static Map APM_AGENT_DEFAULT_SETTINGS = Map.of( + "transaction_sample_rate", + "0.2", + "enable_experimental_instrumentations", + "true" + ); public void addClusterSettingsListeners(ClusterService clusterService, APMTelemetryProvider apmTelemetryProvider) { final ClusterSettings clusterSettings = clusterService.getClusterSettings(); final APMTracer apmTracer = apmTelemetryProvider.getTracer(); + final APMMeter apmMeter = apmTelemetryProvider.getMeter(); clusterSettings.addSettingsUpdateConsumer(APM_ENABLED_SETTING, enabled -> { apmTracer.setEnabled(enabled); + this.setAgentSetting("instrument", Boolean.toString(enabled)); + }); + clusterSettings.addSettingsUpdateConsumer(TELEMETRY_METRICS_ENABLED_SETTING, enabled -> { + apmMeter.setEnabled(enabled); // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to // minimise its impact to a running Elasticsearch. this.setAgentSetting("recording", Boolean.toString(enabled)); @@ -106,8 +117,10 @@ public void setAgentSetting(String key, String value) { private static final List PROHIBITED_AGENT_KEYS = List.of( // ES generates a config file and sets this value "config_file", - // ES controls this via `tracing.apm.enabled` - "recording" + // ES controls this via `telemetry.metrics.enabled` + "recording", + // ES controls this via `apm.enabled` + "instrument" ); public static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting( @@ -164,6 +177,13 @@ public void setAgentSetting(String key, String value) { NodeScope ); + public static final Setting TELEMETRY_METRICS_ENABLED_SETTING = Setting.boolSetting( + "telemetry.metrics.enabled", + false, + OperatorDynamic, + NodeScope + ); + public static final Setting APM_SECRET_TOKEN_SETTING = SecureSetting.secureString( APM_SETTING_PREFIX + "secret_token", null diff --git 
a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java index 495afd43bf176..ae9d91cc6ec51 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java @@ -10,19 +10,27 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.telemetry.apm.internal.metrics.APMMeter; import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; public class APMTelemetryProvider implements TelemetryProvider { private final Settings settings; private final APMTracer apmTracer; + private final APMMeter apmMeter; public APMTelemetryProvider(Settings settings) { this.settings = settings; apmTracer = new APMTracer(settings); + apmMeter = new APMMeter(settings); } @Override public APMTracer getTracer() { return apmTracer; } + + @Override + public APMMeter getMeter() { + return apmMeter; + } } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeter.java new file mode 100644 index 0000000000000..0a8d425579ca2 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeter.java @@ -0,0 +1,180 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.telemetry.apm.internal.APMTelemetryProvider; +import org.elasticsearch.telemetry.metric.DoubleCounter; +import org.elasticsearch.telemetry.metric.DoubleGauge; +import org.elasticsearch.telemetry.metric.DoubleHistogram; +import org.elasticsearch.telemetry.metric.DoubleUpDownCounter; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.LongGauge; +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.LongUpDownCounter; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.function.Supplier; + +import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING; + +public class APMMeter extends AbstractLifecycleComponent implements org.elasticsearch.telemetry.metric.Meter { + private final Instruments instruments; + + private final Supplier otelMeterSupplier; + private final Supplier noopMeterSupplier; + + private volatile boolean enabled; + + public APMMeter(Settings settings) { + this(settings, APMMeter.otelMeter(), APMMeter.noopMeter()); + } + + public APMMeter(Settings settings, Supplier otelMeterSupplier, Supplier noopMeterSupplier) { + this.enabled = TELEMETRY_METRICS_ENABLED_SETTING.get(settings); + this.otelMeterSupplier = otelMeterSupplier; + this.noopMeterSupplier = noopMeterSupplier; + this.instruments = new Instruments(enabled ? 
createOtelMeter() : createNoopMeter()); + } + + /** + * @see org.elasticsearch.telemetry.apm.internal.APMAgentSettings#addClusterSettingsListeners(ClusterService, APMTelemetryProvider) + */ + public void setEnabled(boolean enabled) { + this.enabled = enabled; + if (enabled) { + instruments.setProvider(createOtelMeter()); + } else { + instruments.setProvider(createNoopMeter()); + } + } + + @Override + protected void doStart() {} + + @Override + protected void doStop() { + instruments.setProvider(createNoopMeter()); + } + + @Override + protected void doClose() {} + + @Override + public DoubleCounter registerDoubleCounter(String name, String description, String unit) { + return instruments.registerDoubleCounter(name, description, unit); + } + + @Override + public DoubleCounter getDoubleCounter(String name) { + return instruments.getDoubleCounter(name); + } + + @Override + public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) { + return instruments.registerDoubleUpDownCounter(name, description, unit); + } + + @Override + public DoubleUpDownCounter getDoubleUpDownCounter(String name) { + return instruments.getDoubleUpDownCounter(name); + } + + @Override + public DoubleGauge registerDoubleGauge(String name, String description, String unit) { + return instruments.registerDoubleGauge(name, description, unit); + } + + @Override + public DoubleGauge getDoubleGauge(String name) { + return instruments.getDoubleGauge(name); + } + + @Override + public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) { + return instruments.registerDoubleHistogram(name, description, unit); + } + + @Override + public DoubleHistogram getDoubleHistogram(String name) { + return instruments.getDoubleHistogram(name); + } + + @Override + public LongCounter registerLongCounter(String name, String description, String unit) { + return instruments.registerLongCounter(name, description, unit); + } + + @Override + public 
LongCounter getLongCounter(String name) { + return instruments.getLongCounter(name); + } + + @Override + public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) { + return instruments.registerLongUpDownCounter(name, description, unit); + } + + @Override + public LongUpDownCounter getLongUpDownCounter(String name) { + return instruments.getLongUpDownCounter(name); + } + + @Override + public LongGauge registerLongGauge(String name, String description, String unit) { + return instruments.registerLongGauge(name, description, unit); + } + + @Override + public LongGauge getLongGauge(String name) { + return instruments.getLongGauge(name); + } + + @Override + public LongHistogram registerLongHistogram(String name, String description, String unit) { + return instruments.registerLongHistogram(name, description, unit); + } + + @Override + public LongHistogram getLongHistogram(String name) { + return instruments.getLongHistogram(name); + } + + Meter createOtelMeter() { + assert this.enabled; + return AccessController.doPrivileged((PrivilegedAction) otelMeterSupplier::get); + } + + private Meter createNoopMeter() { + return noopMeterSupplier.get(); + } + + private static Supplier noopMeter() { + return () -> OpenTelemetry.noop().getMeter("noop"); + } + + // to be used within doPrivileged block + private static Supplier otelMeter() { + var openTelemetry = GlobalOpenTelemetry.get(); + var meter = openTelemetry.getMeter("elasticsearch"); + return () -> meter; + } + + // scope for testing + Instruments getInstruments() { + return instruments; + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/AbstractInstrument.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/AbstractInstrument.java new file mode 100644 index 0000000000000..d3d485f52bc49 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/AbstractInstrument.java @@ -0,0 
+1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.telemetry.metric.Instrument; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; + +/** + * An instrument that contains the name, description and unit. The delegate may be replaced when + * the provider is updated. + * Subclasses should implement the builder, which is used on initialization and provider updates. + * @param delegated instrument + */ +public abstract class AbstractInstrument implements Instrument { + private final AtomicReference delegate; + private final String name; + private final String description; + private final String unit; + + public AbstractInstrument(Meter meter, String name, String description, String unit) { + this.name = Objects.requireNonNull(name); + this.description = Objects.requireNonNull(description); + this.unit = Objects.requireNonNull(unit); + this.delegate = new AtomicReference<>(doBuildInstrument(meter)); + } + + private T doBuildInstrument(Meter meter) { + return AccessController.doPrivileged((PrivilegedAction) () -> buildInstrument(meter)); + } + + @Override + public String getName() { + return name; + } + + public String getUnit() { + return unit.toString(); + } + + T getInstrument() { + return delegate.get(); + } + + String getDescription() { + return description; + } + + void setProvider(@Nullable Meter meter) { + delegate.set(doBuildInstrument(Objects.requireNonNull(meter))); 
+ } + + abstract T buildInstrument(Meter meter); +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java new file mode 100644 index 0000000000000..b25ffdff5481b --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * DoubleGaugeAdapter wraps an otel ObservableDoubleMeasurement + */ +public class DoubleCounterAdapter extends AbstractInstrument + implements + org.elasticsearch.telemetry.metric.DoubleCounter { + + public DoubleCounterAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + io.opentelemetry.api.metrics.DoubleCounter buildInstrument(Meter meter) { + return Objects.requireNonNull(meter) + .counterBuilder(getName()) + .ofDoubles() + .setDescription(getDescription()) + .setUnit(getUnit()) + .build(); + } + + @Override + public void increment() { + getInstrument().add(1d); + } + + @Override + public void incrementBy(double inc) { + assert inc >= 0; + getInstrument().add(inc); + } + + @Override + public void incrementBy(double inc, Map attributes) { + assert inc >= 0; + getInstrument().add(inc, OtelHelper.fromMap(attributes)); + } +} diff --git 
a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java new file mode 100644 index 0000000000000..9d55d475d4a93 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * DoubleGaugeAdapter wraps an otel ObservableDoubleMeasurement + */ +public class DoubleGaugeAdapter extends AbstractInstrument + implements + org.elasticsearch.telemetry.metric.DoubleGauge { + + public DoubleGaugeAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.ObservableDoubleMeasurement buildInstrument(Meter meter) { + var builder = Objects.requireNonNull(meter).gaugeBuilder(getName()); + return builder.setDescription(getDescription()).setUnit(getUnit()).buildObserver(); + } + + @Override + public void record(double value) { + getInstrument().record(value); + } + + @Override + public void record(double value, Map attributes) { + getInstrument().record(value, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java new file mode 100644 index 
0000000000000..5fd1a8a189b0f --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * DoubleHistogramAdapter wraps an otel DoubleHistogram + */ +public class DoubleHistogramAdapter extends AbstractInstrument + implements + org.elasticsearch.telemetry.metric.DoubleHistogram { + + public DoubleHistogramAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.DoubleHistogram buildInstrument(Meter meter) { + var builder = Objects.requireNonNull(meter).histogramBuilder(getName()); + return builder.setDescription(getDescription()).setUnit(getUnit()).build(); + } + + @Override + public void record(double value) { + getInstrument().record(value); + } + + @Override + public void record(double value, Map attributes) { + getInstrument().record(value, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java new file mode 100644 index 0000000000000..9a2fc1b564766 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * DoubleUpDownCounterAdapter wraps an otel DoubleUpDownCounter + */ +public class DoubleUpDownCounterAdapter extends AbstractInstrument + implements + org.elasticsearch.telemetry.metric.DoubleUpDownCounter { + + public DoubleUpDownCounterAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.DoubleUpDownCounter buildInstrument(Meter meter) { + return Objects.requireNonNull(meter) + .upDownCounterBuilder(getName()) + .ofDoubles() + .setDescription(getDescription()) + .setUnit(getUnit()) + .build(); + } + + @Override + public void add(double inc) { + getInstrument().add(inc); + } + + @Override + public void add(double inc, Map attributes) { + getInstrument().add(inc, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/Instruments.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/Instruments.java new file mode 100644 index 0000000000000..92d7d692f0ea5 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/Instruments.java @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ReleasableLock; +import org.elasticsearch.telemetry.metric.DoubleCounter; +import org.elasticsearch.telemetry.metric.DoubleGauge; +import org.elasticsearch.telemetry.metric.DoubleHistogram; +import org.elasticsearch.telemetry.metric.DoubleUpDownCounter; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.LongGauge; +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.LongUpDownCounter; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.locks.ReentrantLock; + +/** + * Container for registering and fetching instruments by type and name. + * Instrument names must be unique for a given type on registration. + * {@link #setProvider(Meter)} is used to change the provider for all existing instruments. 
+ */ +public class Instruments { + private final Registrar doubleCounters = new Registrar<>(); + private final Registrar doubleUpDownCounters = new Registrar<>(); + private final Registrar doubleGauges = new Registrar<>(); + private final Registrar doubleHistograms = new Registrar<>(); + private final Registrar longCounters = new Registrar<>(); + private final Registrar longUpDownCounters = new Registrar<>(); + private final Registrar longGauges = new Registrar<>(); + private final Registrar longHistograms = new Registrar<>(); + + private final Meter meter; + + public Instruments(Meter meter) { + this.meter = meter; + } + + private final List> registrars = List.of( + doubleCounters, + doubleUpDownCounters, + doubleGauges, + doubleHistograms, + longCounters, + longUpDownCounters, + longGauges, + longHistograms + ); + + // Access to registration has to be restricted when the provider is updated in ::setProvider + protected final ReleasableLock registerLock = new ReleasableLock(new ReentrantLock()); + + public DoubleCounter registerDoubleCounter(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return doubleCounters.register(new DoubleCounterAdapter(meter, name, description, unit)); + } + } + + public DoubleCounter getDoubleCounter(String name) { + return doubleCounters.get(name); + } + + public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return doubleUpDownCounters.register(new DoubleUpDownCounterAdapter(meter, name, description, unit)); + } + } + + public DoubleUpDownCounter getDoubleUpDownCounter(String name) { + return doubleUpDownCounters.get(name); + } + + public DoubleGauge registerDoubleGauge(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return doubleGauges.register(new DoubleGaugeAdapter(meter, name, description, unit)); + } + } + + public 
DoubleGauge getDoubleGauge(String name) { + return doubleGauges.get(name); + } + + public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return doubleHistograms.register(new DoubleHistogramAdapter(meter, name, description, unit)); + } + } + + public DoubleHistogram getDoubleHistogram(String name) { + return doubleHistograms.get(name); + } + + public LongCounter registerLongCounter(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return longCounters.register(new LongCounterAdapter(meter, name, description, unit)); + } + } + + public LongCounter getLongCounter(String name) { + return longCounters.get(name); + } + + public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return longUpDownCounters.register(new LongUpDownCounterAdapter(meter, name, description, unit)); + } + } + + public LongUpDownCounter getLongUpDownCounter(String name) { + return longUpDownCounters.get(name); + } + + public LongGauge registerLongGauge(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return longGauges.register(new LongGaugeAdapter(meter, name, description, unit)); + } + } + + public LongGauge getLongGauge(String name) { + return longGauges.get(name); + } + + public LongHistogram registerLongHistogram(String name, String description, String unit) { + try (ReleasableLock lock = registerLock.acquire()) { + return longHistograms.register(new LongHistogramAdapter(meter, name, description, unit)); + } + } + + public LongHistogram getLongHistogram(String name) { + return longHistograms.get(name); + } + + public void setProvider(Meter meter) { + try (ReleasableLock lock = registerLock.acquire()) { + for (Registrar registrar : registrars) { + registrar.setProvider(meter); + } + } + } + + 
/** + * A typed wrapper for an instrument that + * @param + */ + private static class Registrar> { + private final Map registered = ConcurrentCollections.newConcurrentMap(); + + T register(T instrument) { + registered.compute(instrument.getName(), (k, v) -> { + if (v != null) { + throw new IllegalStateException( + instrument.getClass().getSimpleName() + "[" + instrument.getName() + "] already registered" + ); + } + + return instrument; + }); + return instrument; + } + + T get(String name) { + return registered.get(name); + } + + void setProvider(Meter meter) { + registered.forEach((k, v) -> v.setProvider(meter)); + } + } + + // scope for testing + Meter getMeter() { + return meter; + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java new file mode 100644 index 0000000000000..122d16d9e1aa4 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * LongCounterAdapter wraps an otel LongCounter + */ +public class LongCounterAdapter extends AbstractInstrument + implements + org.elasticsearch.telemetry.metric.LongCounter { + + public LongCounterAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.LongCounter buildInstrument(Meter meter) { + var builder = Objects.requireNonNull(meter).counterBuilder(getName()); + return builder.setDescription(getDescription()).setUnit(getUnit()).build(); + } + + @Override + public void increment() { + getInstrument().add(1L); + } + + @Override + public void incrementBy(long inc) { + assert inc >= 0; + getInstrument().add(inc); + } + + @Override + public void incrementBy(long inc, Map attributes) { + assert inc >= 0; + getInstrument().add(inc, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java new file mode 100644 index 0000000000000..48430285a5173 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * LongGaugeAdapter wraps an otel ObservableLongMeasurement + */ +public class LongGaugeAdapter extends AbstractInstrument + implements + org.elasticsearch.telemetry.metric.LongGauge { + + public LongGaugeAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.ObservableLongMeasurement buildInstrument(Meter meter) { + return Objects.requireNonNull(meter) + .gaugeBuilder(getName()) + .ofLongs() + .setDescription(getDescription()) + .setUnit(getUnit()) + .buildObserver(); + } + + @Override + public void record(long value) { + getInstrument().record(value); + } + + @Override + public void record(long value, Map attributes) { + getInstrument().record(value, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java new file mode 100644 index 0000000000000..bb5be4866e7b7 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * LongHistogramAdapter wraps an otel LongHistogram + */ +public class LongHistogramAdapter extends AbstractInstrument + implements + org.elasticsearch.telemetry.metric.LongHistogram { + + public LongHistogramAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.LongHistogram buildInstrument(Meter meter) { + return Objects.requireNonNull(meter) + .histogramBuilder(getName()) + .ofLongs() + .setDescription(getDescription()) + .setUnit(getUnit()) + .build(); + } + + @Override + public void record(long value) { + getInstrument().record(value); + } + + @Override + public void record(long value, Map attributes) { + getInstrument().record(value, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java new file mode 100644 index 0000000000000..e5af85e4ed192 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.metrics.Meter; + +import java.util.Map; +import java.util.Objects; + +/** + * LongUpDownCounterAdapter wraps an otel LongUpDownCounter + */ +public class LongUpDownCounterAdapter extends AbstractInstrument + implements + org.elasticsearch.telemetry.metric.LongUpDownCounter { + + public LongUpDownCounterAdapter(Meter meter, String name, String description, String unit) { + super(meter, name, description, unit); + } + + @Override + io.opentelemetry.api.metrics.LongUpDownCounter buildInstrument(Meter meter) { + var builder = Objects.requireNonNull(meter).upDownCounterBuilder(getName()); + return builder.setDescription(getDescription()).setUnit(getUnit()).build(); + } + + @Override + public void add(long inc) { + getInstrument().add(inc); + } + + @Override + public void add(long inc, Map attributes) { + getInstrument().add(inc, OtelHelper.fromMap(attributes)); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java new file mode 100644 index 0000000000000..673025a1a41f4 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.common.Attributes; + +import java.util.Map; + +class OtelHelper { + static Attributes fromMap(Map attributes) { + if (attributes == null || attributes.isEmpty()) { + return Attributes.empty(); + } + var builder = Attributes.builder(); + attributes.forEach((k, v) -> { + if (v instanceof String value) { + builder.put(k, value); + } else if (v instanceof Long value) { + builder.put(k, value); + } else if (v instanceof Double value) { + builder.put(k, value); + } else if (v instanceof Boolean value) { + builder.put(k, value); + } else { + throw new IllegalArgumentException("attributes do not support value type of [" + v.getClass().getCanonicalName() + "]"); + } + }); + return builder.build(); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index daedb90047975..428cd5262d692 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -24,7 +24,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.Version; +import org.elasticsearch.Build; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.lucene.RegExp; import org.elasticsearch.common.settings.Settings; @@ -149,8 +149,7 @@ APMServices createApmServices() { return AccessController.doPrivileged((PrivilegedAction) () -> { var openTelemetry = GlobalOpenTelemetry.get(); - var tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); - + var tracer = openTelemetry.getTracer("elasticsearch", Build.current().version()); return new 
APMServices(tracer, openTelemetry); }); } @@ -452,4 +451,5 @@ private static Automaton patternsToAutomaton(List patterns) { } return Operations.union(automata); } + } diff --git a/modules/apm/src/main/plugin-metadata/plugin-security.policy b/modules/apm/src/main/plugin-metadata/plugin-security.policy index b85d3ec05c277..57da3a2efd301 100644 --- a/modules/apm/src/main/plugin-metadata/plugin-security.policy +++ b/modules/apm/src/main/plugin-metadata/plugin-security.policy @@ -11,6 +11,8 @@ grant { permission java.lang.RuntimePermission "createClassLoader"; permission java.lang.RuntimePermission "getClassLoader"; permission java.util.PropertyPermission "elastic.apm.*", "write"; + permission java.util.PropertyPermission "*", "read,write"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; grant codeBase "${codebase.elastic-apm-agent}" { diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeterTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeterTests.java new file mode 100644 index 0000000000000..1064b8820b089 --- /dev/null +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeterTests.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.telemetry.apm.internal.APMAgentSettings; +import org.elasticsearch.telemetry.metric.DoubleCounter; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.sameInstance; + +public class APMMeterTests extends ESTestCase { + Meter testOtel = OpenTelemetry.noop().getMeter("test"); + + Meter noopOtel = OpenTelemetry.noop().getMeter("noop"); + + public void testMeterIsSetUponConstruction() { + // test default + APMMeter apmMeter = new APMMeter(Settings.EMPTY, () -> testOtel, () -> noopOtel); + + Meter meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(noopOtel)); + + // test explicitly enabled + var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel); + + meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(testOtel)); + + // test explicitly disabled + settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build(); + apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel); + + meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(noopOtel)); + } + + public void testMeterIsOverridden() { + APMMeter apmMeter = new APMMeter(Settings.EMPTY, () -> testOtel, () -> noopOtel); + + Meter meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(noopOtel)); + + apmMeter.setEnabled(true); + + meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(testOtel)); + } + + public void testLookupByName() { + var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + + var 
apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel); + + DoubleCounter registeredCounter = apmMeter.registerDoubleCounter("name", "desc", "unit"); + DoubleCounter lookedUpCounter = apmMeter.getDoubleCounter("name"); + + assertThat(lookedUpCounter, sameInstance(registeredCounter)); + } + + public void testNoopIsSetOnStop() { + var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + APMMeter apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel); + apmMeter.start(); + + Meter meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(testOtel)); + + apmMeter.stop(); + + meter = apmMeter.getInstruments().getMeter(); + assertThat(meter, sameInstance(noopOtel)); + } + +} diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsConcurrencyTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsConcurrencyTests.java new file mode 100644 index 0000000000000..51285894f27ee --- /dev/null +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsConcurrencyTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.DoubleCounterBuilder; +import io.opentelemetry.api.metrics.DoubleGaugeBuilder; +import io.opentelemetry.api.metrics.DoubleHistogramBuilder; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongCounterBuilder; +import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.ObservableLongCounter; +import io.opentelemetry.api.metrics.ObservableLongMeasurement; + +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.CountDownLatch; +import java.util.function.Consumer; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; + +public class InstrumentsConcurrencyTests extends ESTestCase { + String name = "name"; + String description = "desc"; + String unit = "kg"; + Meter noopMeter = OpenTelemetry.noop().getMeter("noop"); + CountDownLatch registerLatch = new CountDownLatch(1); + Meter lockingMeter = new Meter() { + @Override + public LongCounterBuilder counterBuilder(String name) { + return new LockingLongCounterBuilder(); + } + + @Override + public LongUpDownCounterBuilder upDownCounterBuilder(String name) { + return null; + } + + @Override + public DoubleHistogramBuilder histogramBuilder(String name) { + return null; + } + + @Override + public DoubleGaugeBuilder gaugeBuilder(String name) { + return null; + } + }; + + class LockingLongCounterBuilder implements LongCounterBuilder { + + @Override + public LongCounterBuilder setDescription(String description) { + return this; + } + + @Override + public LongCounterBuilder setUnit(String unit) { + return this; + } + + @Override + public DoubleCounterBuilder ofDoubles() { + return null; + } + + @Override + public LongCounter build() { + try { + registerLatch.await(); + } catch (Exception e) { + throw 
new RuntimeException(e); + } + return null; + } + + @Override + public ObservableLongCounter buildWithCallback(Consumer callback) { + return null; + } + } + + public void testLockingWhenRegistering() throws Exception { + Instruments instruments = new Instruments(lockingMeter); + + var registerThread = new Thread(() -> instruments.registerLongCounter(name, description, unit)); + // registerThread has a countDown latch that is simulating a long-running registration + registerThread.start(); + var setProviderThread = new Thread(() -> instruments.setProvider(noopMeter)); + // a setProviderThread will attempt to override a meter, but will wait to acquire the lock + setProviderThread.start(); + + // assert that a thread is waiting for a lock during long-running registration + assertBusy(() -> assertThat(setProviderThread.getState(), equalTo(Thread.State.WAITING))); + // assert that the old lockingMeter is still in place + assertBusy(() -> assertThat(instruments.getMeter(), sameInstance(lockingMeter))); + + // finish long-running registration + registerLatch.countDown(); + // assert that the meter was overridden + assertBusy(() -> assertThat(instruments.getMeter(), sameInstance(noopMeter))); + + } +} diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsTests.java new file mode 100644 index 0000000000000..daf511fcf7042 --- /dev/null +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsTests.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.telemetry.apm.internal.metrics; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; + +public class InstrumentsTests extends ESTestCase { + Meter noopMeter = OpenTelemetry.noop().getMeter("noop"); + Meter someOtherMeter = OpenTelemetry.noop().getMeter("xyz"); + String name = "name"; + String description = "desc"; + String unit = "kg"; + + public void testRegistrationAndLookup() { + Instruments instruments = new Instruments(noopMeter); + { + var registered = instruments.registerDoubleCounter(name, description, unit); + var lookedUp = instruments.getDoubleCounter(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerDoubleUpDownCounter(name, description, unit); + var lookedUp = instruments.getDoubleUpDownCounter(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerDoubleGauge(name, description, unit); + var lookedUp = instruments.getDoubleGauge(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerDoubleHistogram(name, description, unit); + var lookedUp = instruments.getDoubleHistogram(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerLongCounter(name, description, unit); + var lookedUp = instruments.getLongCounter(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerLongUpDownCounter(name, description, unit); + var lookedUp = instruments.getLongUpDownCounter(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + var registered = instruments.registerLongGauge(name, description, unit); + var lookedUp = instruments.getLongGauge(name); + assertThat(registered, sameInstance(lookedUp)); + } + { + 
var registered = instruments.registerLongHistogram(name, description, unit); + var lookedUp = instruments.getLongHistogram(name); + assertThat(registered, sameInstance(lookedUp)); + } + } + + public void testNameValidation() { + Instruments instruments = new Instruments(noopMeter); + + instruments.registerLongHistogram(name, description, unit); + var e = expectThrows(IllegalStateException.class, () -> instruments.registerLongHistogram(name, description, unit)); + assertThat(e.getMessage(), equalTo("LongHistogramAdapter[name] already registered")); + } +} diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 384970bdc7ab9..30d7a5b6156c9 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; @@ -48,7 +49,6 @@ import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.MultiSearchRequestBuilder; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequest; @@ -297,7 +297,7 @@ public void testOtherWriteOps() throws Exception { { IndexRequest indexRequest = new 
IndexRequest(dataStreamName).source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON) .opType(DocWriteRequest.OpType.CREATE); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertThat(indexResponse.getIndex(), backingIndexEqualTo(dataStreamName, 1)); } { @@ -1176,7 +1176,7 @@ public void testIndexDocsWithCustomRoutingTargetingDataStreamIsNotAllowed() thro String dataStream = "logs-foobar"; IndexRequest indexRequest = new IndexRequest(dataStream).source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON) .opType(DocWriteRequest.OpType.CREATE); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertThat(indexResponse.getIndex(), backingIndexEqualTo(dataStream, 1)); // Index doc with custom routing that targets the data stream @@ -1238,7 +1238,7 @@ public void testIndexDocsWithCustomRoutingAllowed() throws Exception { IndexRequest indexRequest = new IndexRequest(dataStream).source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON) .opType(DocWriteRequest.OpType.CREATE) .routing("custom"); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertThat(indexResponse.getIndex(), backingIndexEqualTo(dataStream, 1)); // Index doc with custom routing that targets the data stream IndexRequest indexRequestWithRouting = new IndexRequest(dataStream).source("@timestamp", System.currentTimeMillis()) @@ -1266,7 +1266,7 @@ public void testIndexDocsWithCustomRoutingTargetingBackingIndex() throws Excepti // Index doc that triggers creation of a data stream IndexRequest indexRequest = new IndexRequest("logs-foobar").source("{\"@timestamp\": \"2020-12-12\"}", XContentType.JSON) .opType(DocWriteRequest.OpType.CREATE); - IndexResponse indexResponse = 
client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertThat(indexResponse.getIndex(), backingIndexEqualTo("logs-foobar", 1)); String backingIndex = indexResponse.getIndex(); @@ -1277,7 +1277,7 @@ public void testIndexDocsWithCustomRoutingTargetingBackingIndex() throws Excepti .id(indexResponse.getId()) .setIfPrimaryTerm(indexResponse.getPrimaryTerm()) .setIfSeqNo(indexResponse.getSeqNo()); - IndexResponse response = client().index(indexRequestWithRouting).actionGet(); + DocWriteResponse response = client().index(indexRequestWithRouting).actionGet(); assertThat(response.getIndex(), equalTo(backingIndex)); } @@ -1320,11 +1320,17 @@ public void testGetDataStream() throws Exception { ).actionGet(); assertThat(response.getDataStreams().size(), is(1)); DataStreamInfo metricsFooDataStream = response.getDataStreams().get(0); - assertThat(metricsFooDataStream.getDataStream().getName(), is("metrics-foo")); + DataStream dataStream = metricsFooDataStream.getDataStream(); + assertThat(dataStream.getName(), is("metrics-foo")); assertThat(metricsFooDataStream.getDataStreamStatus(), is(ClusterHealthStatus.YELLOW)); assertThat(metricsFooDataStream.getIndexTemplate(), is("template_for_foo")); assertThat(metricsFooDataStream.getIlmPolicy(), is(nullValue())); - assertThat(metricsFooDataStream.getDataStream().getLifecycle(), is(lifecycle)); + assertThat(dataStream.getLifecycle(), is(lifecycle)); + assertThat(metricsFooDataStream.templatePreferIlmValue(), is(true)); + GetDataStreamAction.Response.IndexProperties indexProperties = metricsFooDataStream.getIndexSettingsValues() + .get(dataStream.getWriteIndex()); + assertThat(indexProperties.ilmPolicyName(), is(nullValue())); + assertThat(indexProperties.preferIlm(), is(true)); } private static void assertBackingIndex(String backingIndex, String timestampFieldPathInMapping, Map expectedMapping) { diff --git 
a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index 9b07828e04225..715d2a7a4de2f 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.datastreams.DeleteDataStreamAction; import org.elasticsearch.action.datastreams.GetDataStreamAction; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; @@ -116,7 +115,10 @@ public void setup() throws Exception { ds2BackingIndexName = dsBackingIndexName.replace("-ds-", "-ds2-"); otherDs2BackingIndexName = otherDsBackingIndexName.replace("-other-ds-", "-other-ds2-"); - IndexResponse indexResponse = client.prepareIndex("ds").setOpType(DocWriteRequest.OpType.CREATE).setSource(DOCUMENT_SOURCE).get(); + DocWriteResponse indexResponse = client.prepareIndex("ds") + .setOpType(DocWriteRequest.OpType.CREATE) + .setSource(DOCUMENT_SOURCE) + .get(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); id = indexResponse.getId(); @@ -297,7 +299,7 @@ public void testSnapshotAndRestoreInPlace() { } public void testSnapshotAndRestoreAllIncludeSpecificDataStream() throws Exception { - IndexResponse indexResponse = client.prepareIndex("other-ds") + DocWriteResponse indexResponse = client.prepareIndex("other-ds") .setOpType(DocWriteRequest.OpType.CREATE) .setSource(DOCUMENT_SOURCE) .get(); diff --git 
a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java index 865de8b41cc68..0f60cbba0a4ff 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/SystemDataStreamSnapshotIT.java @@ -8,12 +8,12 @@ package org.elasticsearch.datastreams; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.datastreams.DeleteDataStreamAction; import org.elasticsearch.action.datastreams.GetDataStreamAction; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.index.IndexNotFoundException; @@ -64,7 +64,7 @@ public void testSystemDataStreamInGlobalState() throws Exception { } // Index a doc so that a concrete backing index will be created - IndexResponse indexRepsonse = client().prepareIndex(SYSTEM_DATA_STREAM_NAME) + DocWriteResponse indexRepsonse = client().prepareIndex(SYSTEM_DATA_STREAM_NAME) .setId("42") .setSource("{ \"@timestamp\": \"2099-03-08T11:06:07.000Z\", \"name\": \"my-name\" }", XContentType.JSON) .setOpType(DocWriteRequest.OpType.CREATE) @@ -162,7 +162,7 @@ public void testSystemDataStreamInFeatureState() throws Exception { } // Index a doc so that a concrete backing index will be created - IndexResponse indexToDataStreamResponse = client().prepareIndex(SYSTEM_DATA_STREAM_NAME) + 
DocWriteResponse indexToDataStreamResponse = client().prepareIndex(SYSTEM_DATA_STREAM_NAME) .setId("42") .setSource("{ \"@timestamp\": \"2099-03-08T11:06:07.000Z\", \"name\": \"my-name\" }", XContentType.JSON) .setOpType(DocWriteRequest.OpType.CREATE) @@ -171,7 +171,7 @@ public void testSystemDataStreamInFeatureState() throws Exception { assertThat(indexToDataStreamResponse.status().getStatus(), oneOf(200, 201)); // Index a doc so that a concrete backing index will be created - IndexResponse indexResponse = client().prepareIndex("my-index") + DocWriteResponse indexResponse = client().prepareIndex("my-index") .setId("42") .setSource("{ \"name\": \"my-name\" }", XContentType.JSON) .setOpType(DocWriteRequest.OpType.CREATE) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java index a368ec284fd0c..c5ca8445b08eb 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java @@ -36,7 +36,11 @@ protected String getTestRestCluster() { @Override protected Settings restAdminSettings() { - String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + if (super.restAdminSettings().keySet().contains(ThreadContext.PREFIX + ".Authorization")) { + return super.restAdminSettings(); + } else { + String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); + return Settings.builder().put(super.restAdminSettings()).put(ThreadContext.PREFIX + ".Authorization", token).build(); + } } } diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java index 73af952af524d..de81ca9bef18c 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java @@ -11,6 +11,8 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.IndexProperties; +import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.ManagedBy; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterState; @@ -21,6 +23,7 @@ import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -39,9 +42,12 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.elasticsearch.index.IndexSettings.PREFER_ILM_SETTING; + public class GetDataStreamsTransportAction extends TransportMasterNodeReadAction< GetDataStreamAction.Request, GetDataStreamAction.Response> { @@ -95,6 +101,7 @@ static GetDataStreamAction.Response innerOperation( List dataStreamInfos = new ArrayList<>(dataStreams.size()); for (DataStream 
dataStream : dataStreams) { final String indexTemplate; + boolean indexTemplatePreferIlmValue = true; String ilmPolicyName = null; if (dataStream.isSystem()) { SystemDataStreamDescriptor dataStreamDescriptor = systemIndices.findMatchingDataStreamDescriptor(dataStream.getName()); @@ -104,13 +111,15 @@ static GetDataStreamAction.Response innerOperation( dataStreamDescriptor.getComposableIndexTemplate(), dataStreamDescriptor.getComponentTemplates() ); - ilmPolicyName = settings.get("index.lifecycle.name"); + ilmPolicyName = settings.get(IndexMetadata.LIFECYCLE_NAME); + indexTemplatePreferIlmValue = PREFER_ILM_SETTING.get(settings); } } else { indexTemplate = MetadataIndexTemplateService.findV2Template(state.metadata(), dataStream.getName(), false); if (indexTemplate != null) { Settings settings = MetadataIndexTemplateService.resolveSettings(state.metadata(), indexTemplate); - ilmPolicyName = settings.get("index.lifecycle.name"); + ilmPolicyName = settings.get(IndexMetadata.LIFECYCLE_NAME); + indexTemplatePreferIlmValue = PREFER_ILM_SETTING.get(settings); } else { LOGGER.warn( "couldn't find any matching template for data stream [{}]. 
has it been restored (and possibly renamed)" @@ -125,18 +134,35 @@ static GetDataStreamAction.Response innerOperation( dataStream.getIndices().stream().map(Index::getName).toArray(String[]::new) ); + Map backingIndicesSettingsValues = new HashMap<>(); + Metadata metadata = state.getMetadata(); + for (Index index : dataStream.getIndices()) { + IndexMetadata indexMetadata = metadata.index(index); + Boolean preferIlm = PREFER_ILM_SETTING.get(indexMetadata.getSettings()); + assert preferIlm != null : "must use the default prefer ilm setting value, if nothing else"; + ManagedBy managedBy; + if (metadata.isIndexManagedByILM(indexMetadata)) { + managedBy = ManagedBy.ILM; + } else if (dataStream.isIndexManagedByDataStreamLifecycle(index, metadata::index)) { + managedBy = ManagedBy.LIFECYCLE; + } else { + managedBy = ManagedBy.UNMANAGED; + } + backingIndicesSettingsValues.put(index, new IndexProperties(preferIlm, indexMetadata.getLifecyclePolicyName(), managedBy)); + } + GetDataStreamAction.Response.TimeSeries timeSeries = null; if (dataStream.getIndexMode() == IndexMode.TIME_SERIES) { List> ranges = new ArrayList<>(); Tuple current = null; String previousIndexName = null; for (Index index : dataStream.getIndices()) { - IndexMetadata metadata = state.getMetadata().index(index); - if (metadata.getIndexMode() != IndexMode.TIME_SERIES) { + IndexMetadata indexMetadata = metadata.index(index); + if (indexMetadata.getIndexMode() != IndexMode.TIME_SERIES) { continue; } - Instant start = metadata.getTimeSeriesStart(); - Instant end = metadata.getTimeSeriesEnd(); + Instant start = indexMetadata.getTimeSeriesStart(); + Instant end = indexMetadata.getTimeSeriesEnd(); if (current == null) { current = new Tuple<>(start, end); } else if (current.v2().compareTo(start) == 0) { @@ -175,7 +201,9 @@ static GetDataStreamAction.Response innerOperation( streamHealth.getStatus(), indexTemplate, ilmPolicyName, - timeSeries + timeSeries, + backingIndicesSettingsValues, + indexTemplatePreferIlmValue 
) ); } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java index 8617106d5cc28..803f5c8661f17 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java @@ -9,9 +9,9 @@ package org.elasticsearch.datastreams; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.compress.CompressedXContent; @@ -58,7 +58,7 @@ protected Collection> getPlugins() { public void testGetTimestampFieldTypeForTsdbDataStream() throws IOException { createTemplate(true); - IndexResponse indexResponse = indexDoc(); + DocWriteResponse indexResponse = indexDoc(); var indicesService = getInstanceFromNode(IndicesService.class); var result = indicesService.getTimestampFieldType(indexResponse.getShardId().getIndex()); @@ -67,14 +67,14 @@ public void testGetTimestampFieldTypeForTsdbDataStream() throws IOException { public void testGetTimestampFieldTypeForDataStream() throws IOException { createTemplate(false); - IndexResponse indexResponse = indexDoc(); + DocWriteResponse indexResponse = indexDoc(); var indicesService = getInstanceFromNode(IndicesService.class); var result = indicesService.getTimestampFieldType(indexResponse.getShardId().getIndex()); assertThat(result, nullValue()); } - private IndexResponse indexDoc() { + private DocWriteResponse indexDoc() { Instant time = 
Instant.now(); var indexRequest = new IndexRequest("k8s").opType(DocWriteRequest.OpType.CREATE); indexRequest.source(DOC.replace("$time", formatInstant(time)), XContentType.JSON); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index 469c72e539c45..12e1604d10c1f 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -8,15 +8,33 @@ package org.elasticsearch.datastreams.action; import org.elasticsearch.action.datastreams.GetDataStreamAction.Response; +import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.ManagedBy; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; import java.time.Instant; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; + +import static org.elasticsearch.cluster.metadata.DataStream.getDefaultBackingIndexName; +import static org.hamcrest.Matchers.is; +import static 
org.hamcrest.Matchers.nullValue; public class GetDataStreamsResponseTests extends AbstractWireSerializingTestCase { @@ -43,13 +61,198 @@ protected Response mutateInstance(Response instance) { return new Response(instance.getDataStreams().stream().map(this::mutateInstance).toList()); } + @SuppressWarnings("unchecked") + public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Exception { + // we'll test a data stream with 3 backing indices - two managed by ILM (having the ILM policy configured for them) + // and one without any ILM policy configured + String dataStreamName = "logs"; + + Index firstGenerationIndex = new Index(getDefaultBackingIndexName(dataStreamName, 1), UUIDs.base64UUID()); + Index secondGenerationIndex = new Index(getDefaultBackingIndexName(dataStreamName, 2), UUIDs.base64UUID()); + Index writeIndex = new Index(getDefaultBackingIndexName(dataStreamName, 3), UUIDs.base64UUID()); + List indices = List.of(firstGenerationIndex, secondGenerationIndex, writeIndex); + { + // data stream has an enabled lifecycle + DataStream logs = new DataStream( + "logs", + indices, + 3, + null, + false, + false, + false, + true, + IndexMode.STANDARD, + new DataStreamLifecycle() + ); + + String ilmPolicyName = "rollover-30days"; + Map indexSettingsValues = Map.of( + firstGenerationIndex, + new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM), + secondGenerationIndex, + new Response.IndexProperties(false, ilmPolicyName, ManagedBy.LIFECYCLE), + writeIndex, + new Response.IndexProperties(false, null, ManagedBy.LIFECYCLE) + ); + + Response.DataStreamInfo dataStreamInfo = new Response.DataStreamInfo( + logs, + ClusterHealthStatus.GREEN, + "index-template", + null, + null, + indexSettingsValues, + false + ); + Response response = new Response(List.of(dataStreamInfo)); + XContentBuilder contentBuilder = XContentFactory.jsonBuilder(); + response.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); + + BytesReference bytes = 
BytesReference.bytes(contentBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) { + Map map = parser.map(); + List dataStreams = (List) map.get(Response.DATA_STREAMS_FIELD.getPreferredName()); + assertThat(dataStreams.size(), is(1)); + Map dataStreamMap = (Map) dataStreams.get(0); + assertThat(dataStreamMap.get(DataStream.NAME_FIELD.getPreferredName()), is(dataStreamName)); + + assertThat(dataStreamMap.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat(dataStreamMap.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); + assertThat(dataStreamMap.get(Response.DataStreamInfo.LIFECYCLE_FIELD.getPreferredName()), is(Map.of("enabled", true))); + assertThat( + dataStreamMap.get(Response.DataStreamInfo.NEXT_GENERATION_INDEX_MANAGED_BY.getPreferredName()), + is(ManagedBy.LIFECYCLE.displayValue) + ); + + List indicesRepresentation = (List) dataStreamMap.get(DataStream.INDICES_FIELD.getPreferredName()); + Map firstGenIndexRepresentation = (Map) indicesRepresentation.get(0); + assertThat(firstGenIndexRepresentation.get("index_name"), is(firstGenerationIndex.getName())); + assertThat(firstGenIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(true)); + assertThat(firstGenIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(ilmPolicyName)); + assertThat( + firstGenIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.ILM.displayValue) + ); + + Map secondGenIndexRepresentation = (Map) indicesRepresentation.get(1); + assertThat(secondGenIndexRepresentation.get("index_name"), is(secondGenerationIndex.getName())); + assertThat(secondGenIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat( + secondGenIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), + is(ilmPolicyName) + ); + assertThat( + 
secondGenIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.LIFECYCLE.displayValue) + ); + + // the write index is managed by data stream lifecycle + Map writeIndexRepresentation = (Map) indicesRepresentation.get(2); + assertThat(writeIndexRepresentation.get("index_name"), is(writeIndex.getName())); + assertThat(writeIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat(writeIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); + assertThat( + writeIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.LIFECYCLE.displayValue) + ); + } + } + + { + // data stream has a lifecycle that's not enabled + DataStream logs = new DataStream( + "logs", + indices, + 3, + null, + false, + false, + false, + true, + IndexMode.STANDARD, + new DataStreamLifecycle(null, null, false) + ); + + String ilmPolicyName = "rollover-30days"; + Map indexSettingsValues = Map.of( + firstGenerationIndex, + new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM), + secondGenerationIndex, + new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM), + writeIndex, + new Response.IndexProperties(false, null, ManagedBy.UNMANAGED) + ); + + Response.DataStreamInfo dataStreamInfo = new Response.DataStreamInfo( + logs, + ClusterHealthStatus.GREEN, + "index-template", + null, + null, + indexSettingsValues, + false + ); + Response response = new Response(List.of(dataStreamInfo)); + XContentBuilder contentBuilder = XContentFactory.jsonBuilder(); + response.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); + + BytesReference bytes = BytesReference.bytes(contentBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) { + Map map = parser.map(); + List dataStreams = (List) map.get(Response.DATA_STREAMS_FIELD.getPreferredName()); + assertThat(dataStreams.size(), is(1)); + Map 
dataStreamMap = (Map) dataStreams.get(0); + assertThat(dataStreamMap.get(DataStream.NAME_FIELD.getPreferredName()), is(dataStreamName)); + // note that the prefer_ilm value is displayed at the top level even if the template backing the data stream doesn't have a + // policy specified anymore + assertThat(dataStreamMap.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat(dataStreamMap.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); + assertThat(dataStreamMap.get(Response.DataStreamInfo.LIFECYCLE_FIELD.getPreferredName()), is(Map.of("enabled", false))); + assertThat( + dataStreamMap.get(Response.DataStreamInfo.NEXT_GENERATION_INDEX_MANAGED_BY.getPreferredName()), + is(ManagedBy.UNMANAGED.displayValue) + ); + + List indicesRepresentation = (List) dataStreamMap.get(DataStream.INDICES_FIELD.getPreferredName()); + Map firstGenIndexRepresentation = (Map) indicesRepresentation.get(0); + assertThat(firstGenIndexRepresentation.get("index_name"), is(firstGenerationIndex.getName())); + assertThat(firstGenIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(true)); + assertThat(firstGenIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(ilmPolicyName)); + assertThat( + firstGenIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.ILM.displayValue) + ); + + // the write index is managed by data stream lifecycle + Map writeIndexRepresentation = (Map) indicesRepresentation.get(2); + assertThat(writeIndexRepresentation.get("index_name"), is(writeIndex.getName())); + assertThat(writeIndexRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat(writeIndexRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); + assertThat( + writeIndexRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + 
is(ManagedBy.UNMANAGED.displayValue) + ); + } + } + } + + public void testManagedByDisplayValuesDontAccidentalyChange() { + // UI might derive logic based on the display values so any changes should be coordinated with the UI team + assertThat(ManagedBy.ILM.displayValue, is("Index Lifecycle Management")); + assertThat(ManagedBy.LIFECYCLE.displayValue, is("Data stream lifecycle")); + assertThat(ManagedBy.UNMANAGED.displayValue, is("Unmanaged")); + } + private Response.DataStreamInfo mutateInstance(Response.DataStreamInfo instance) { var dataStream = instance.getDataStream(); var status = instance.getDataStreamStatus(); var indexTemplate = instance.getIndexTemplate(); var ilmPolicyName = instance.getIlmPolicy(); var timeSeries = instance.getTimeSeries(); - switch (randomIntBetween(0, 4)) { + var indexSettings = instance.getIndexSettingsValues(); + var templatePreferIlm = instance.templatePreferIlmValue(); + switch (randomIntBetween(0, 6)) { case 0 -> dataStream = randomValueOtherThan(dataStream, DataStreamTestHelper::randomInstance); case 1 -> status = randomValueOtherThan(status, () -> randomFrom(ClusterHealthStatus.values())); case 2 -> indexTemplate = randomBoolean() && indexTemplate != null ? null : randomAlphaOfLengthBetween(2, 10); @@ -57,8 +260,22 @@ private Response.DataStreamInfo mutateInstance(Response.DataStreamInfo instance) case 4 -> timeSeries = randomBoolean() && timeSeries != null ? null : randomValueOtherThan(timeSeries, () -> new Response.TimeSeries(generateRandomTimeSeries())); + case 5 -> indexSettings = randomValueOtherThan( + indexSettings, + () -> randomBoolean() + ? Map.of() + : Map.of( + new Index(randomAlphaOfLengthBetween(50, 100), UUIDs.base64UUID()), + new Response.IndexProperties( + randomBoolean(), + randomAlphaOfLengthBetween(50, 100), + randomBoolean() ? ManagedBy.ILM : ManagedBy.LIFECYCLE + ) + ) + ); + case 6 -> templatePreferIlm = templatePreferIlm ? 
false : true; } - return new Response.DataStreamInfo(dataStream, status, indexTemplate, ilmPolicyName, timeSeries); + return new Response.DataStreamInfo(dataStream, status, indexTemplate, ilmPolicyName, timeSeries, indexSettings, templatePreferIlm); } private List> generateRandomTimeSeries() { @@ -70,6 +287,21 @@ private List> generateRandomTimeSeries() { return timeSeries; } + private Map generateRandomIndexSettingsValues() { + Map values = new HashMap<>(); + for (int i = 0; i < randomIntBetween(0, 3); i++) { + values.put( + new Index(randomAlphaOfLengthBetween(50, 100), UUIDs.base64UUID()), + new Response.IndexProperties( + randomBoolean(), + randomAlphaOfLengthBetween(50, 100), + randomBoolean() ? ManagedBy.ILM : ManagedBy.LIFECYCLE + ) + ); + } + return values; + } + private Response.DataStreamInfo generateRandomDataStreamInfo() { List> timeSeries = randomBoolean() ? generateRandomTimeSeries() : null; return new Response.DataStreamInfo( @@ -77,7 +309,9 @@ private Response.DataStreamInfo generateRandomDataStreamInfo() { ClusterHealthStatus.GREEN, randomAlphaOfLengthBetween(2, 10), randomAlphaOfLengthBetween(2, 10), - timeSeries != null ? new Response.TimeSeries(timeSeries) : null + timeSeries != null ? 
new Response.TimeSeries(timeSeries) : null, + generateRandomIndexSettingsValues(), + randomBoolean() ); } } diff --git a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java index fa7b4ca1a80c0..43438bfe9e5fb 100644 --- a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java +++ b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -19,6 +20,7 @@ import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.junit.ClassRule; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99764") public class DataStreamsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public DataStreamsClientYamlTestSuiteIT(final ClientYamlTestCandidate testCandidate) { diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 50c8e2c74dc74..09cec438d10cc 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -311,6 +311,77 @@ setup: name: simple-data-stream2 - is_true: acknowledged +--- +"Get data stream and check DSL and ILM information": + - skip: + version: " - 8.10.99" + reason: "data streams DSL and ILM mixing information available in 8.11+" + + - do: 
+ allowed_warnings: + - "index template [mixing-dsl-template] has index patterns [mixing-dsl-stream] matching patterns from existing older templates + [global] with patterns (global => [*]); this template [mixing-dsl-template] will take precedence during new index creation" + indices.put_index_template: + name: mixing-dsl-template + body: + index_patterns: [mixing-dsl-stream] + template: + mappings: + properties: + '@timestamp': + type: date_nanos + lifecycle: + data_retention: "30d" + enabled: false + settings: + index.lifecycle.prefer_ilm: false + index.lifecycle.name: "missing_ilm_policy" + data_stream: {} + + - do: + indices.create_data_stream: + name: mixing-dsl-stream + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: mixing-dsl-stream + - match: { data_streams.0.name: mixing-dsl-stream } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - match: { data_streams.0.ilm_policy: "missing_ilm_policy" } + - match: { data_streams.0.prefer_ilm: false } + - match: { data_streams.0.next_generation_managed_by: "Index Lifecycle Management" } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.prefer_ilm: false } + - match: { data_streams.0.indices.0.ilm_policy: "missing_ilm_policy" } + - match: { data_streams.0.indices.0.managed_by: "Index Lifecycle Management" } + + - do: + indices.put_data_lifecycle: + name: "*" + body: > + { + "data_retention": "30d", + "enabled": true + } + + - is_true: acknowledged + + - do: + indices.get_data_stream: + name: mixing-dsl-stream + - match: { data_streams.0.name: mixing-dsl-stream } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - match: { data_streams.0.ilm_policy: "missing_ilm_policy" } + - match: { data_streams.0.prefer_ilm: false } + - match: { data_streams.0.next_generation_managed_by: "Data stream lifecycle" } + - length: { data_streams.0.indices: 1 } + - 
match: { data_streams.0.indices.0.prefer_ilm: false } + - match: { data_streams.0.indices.0.ilm_policy: "missing_ilm_policy" } + - match: { data_streams.0.indices.0.managed_by: "Data stream lifecycle" } + --- "Delete data stream with backing indices": - skip: diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java index f2cebfc2569d7..96ca77a5f65f9 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -321,7 +320,7 @@ public boolean validateClusterForming() { ); // but this one should pass since it has a longer timeout - final PlainActionFuture future = new PlainActionFuture<>(); + final PlainActionFuture future = new PlainActionFuture<>(); client().prepareIndex("index") .setId("passes1") .setSource("x", 2) @@ -333,7 +332,7 @@ public boolean validateClusterForming() { internalCluster().startNode(Settings.builder().put(GatewayService.RECOVER_AFTER_DATA_NODES_SETTING.getKey(), "1")); ensureYellow("index"); - final IndexResponse indexResponse = future.actionGet(timeout); + final DocWriteResponse indexResponse = future.actionGet(timeout); assertThat(indexResponse.status(), equalTo(RestStatus.CREATED)); assertThat(indexResponse.getResult(), equalTo(DocWriteResponse.Result.CREATED)); diff --git 
a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java index dbfde52fd62ea..8490b17d535c6 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java @@ -9,8 +9,8 @@ package org.elasticsearch.ingest.geoip; import org.apache.lucene.util.Constants; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -101,7 +101,7 @@ public void testLazyLoading() throws IOException { final IndexRequest indexRequest = new IndexRequest("index"); indexRequest.setPipeline("geoip"); indexRequest.source(Collections.singletonMap("ip", "1.1.1.1")); - final IndexResponse indexResponse = client(ingestNode).index(indexRequest).actionGet(); + final DocWriteResponse indexResponse = client(ingestNode).index(indexRequest).actionGet(); assertThat(indexResponse.status(), equalTo(RestStatus.CREATED)); // now the geo-IP database should be loaded on the ingest node assertDatabaseLoadStatus(ingestNode, true); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java index 9f3334a07d8f3..f5a57e68581f2 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java @@ -14,6 +14,7 @@ import 
org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteRequest.OpType; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushResponse; @@ -220,7 +221,7 @@ public void testIndexChunks() throws IOException { AtomicInteger chunkIndex = new AtomicInteger(); - client.addHandler(IndexAction.INSTANCE, (IndexRequest request, ActionListener listener) -> { + client.addHandler(IndexAction.INSTANCE, (IndexRequest request, ActionListener listener) -> { int chunk = chunkIndex.getAndIncrement(); assertEquals(OpType.CREATE, request.opType()); assertThat(request.id(), Matchers.startsWith("test_" + (chunk + 15) + "_")); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index 131db77f4e7c5..7d6c02096e438 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -15,7 +15,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.xcontent.XContentParser; @@ -76,7 +76,11 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client if (searchRequest.source().explain() != null) { searchTemplateRequest.setExplain(searchRequest.source().explain()); } - return channel -> 
client.execute(SearchTemplateAction.INSTANCE, searchTemplateRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute( + SearchTemplateAction.INSTANCE, + searchTemplateRequest, + new RestToXContentListener<>(channel, SearchTemplateResponse::status) + ); } @Override diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index cb17ed039460d..b4b804bf22e92 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -26,7 +26,7 @@ import java.io.InputStream; import java.util.Map; -public class SearchTemplateResponse extends ActionResponse implements StatusToXContentObject { +public class SearchTemplateResponse extends ActionResponse implements ToXContentObject { public static ParseField TEMPLATE_OUTPUT_FIELD = new ParseField("template_output"); /** Contains the source of the rendered template **/ @@ -113,7 +113,6 @@ void innerToXContent(XContentBuilder builder, Params params) throws IOException } } - @Override public RestStatus status() { if (hasResponse()) { return response.status(); diff --git 
a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index 55fdc509f6a42..4cbf40849cbe9 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.MockSecureSettings; @@ -234,11 +235,11 @@ public void testLargeBlobCountDeletion() throws Exception { for (int i = 0; i < numberOfBlobs; i++) { byte[] bytes = randomBytes(randomInt(100)); String blobName = randomAlphaOfLength(10); - container.writeBlob(blobName, new BytesArray(bytes), false); + container.writeBlob(OperationPurpose.SNAPSHOT, blobName, new BytesArray(bytes), false); } - container.delete(); - assertThat(container.listBlobs(), is(anEmptyMap())); + container.delete(OperationPurpose.SNAPSHOT); + assertThat(container.listBlobs(OperationPurpose.SNAPSHOT), is(anEmptyMap())); } } @@ -249,7 +250,7 @@ public void testDeleteBlobsIgnoringIfNotExists() throws Exception { for (int i = 0; i < 10; i++) { byte[] bytes = randomBytes(randomInt(100)); String blobName = randomAlphaOfLength(10); - container.writeBlob(blobName, new BytesArray(bytes), false); + container.writeBlob(OperationPurpose.SNAPSHOT, blobName, new BytesArray(bytes), false); blobsToDelete.add(blobName); } @@ -259,15 +260,18 @@ public void testDeleteBlobsIgnoringIfNotExists() 
throws Exception { } Randomness.shuffle(blobsToDelete); - container.deleteBlobsIgnoringIfNotExists(blobsToDelete.iterator()); - assertThat(container.listBlobs(), is(anEmptyMap())); + container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobsToDelete.iterator()); + assertThat(container.listBlobs(OperationPurpose.SNAPSHOT), is(anEmptyMap())); } } public void testNotFoundErrorMessageContainsFullKey() throws Exception { try (BlobStore store = newBlobStore()) { BlobContainer container = store.blobContainer(BlobPath.EMPTY.add("nested").add("dir")); - NoSuchFileException exception = expectThrows(NoSuchFileException.class, () -> container.readBlob("blob")); + NoSuchFileException exception = expectThrows( + NoSuchFileException.class, + () -> container.readBlob(OperationPurpose.SNAPSHOT, "blob") + ); assertThat(exception.getMessage(), containsString("nested/dir/blob] not found")); } } @@ -277,10 +281,10 @@ public void testReadByteByByte() throws Exception { BlobContainer container = store.blobContainer(BlobPath.EMPTY.add(UUIDs.randomBase64UUID())); var data = randomBytes(randomIntBetween(128, 512)); String blobName = randomName(); - container.writeBlob(blobName, new ByteArrayInputStream(data), data.length, true); + container.writeBlob(OperationPurpose.SNAPSHOT, blobName, new ByteArrayInputStream(data), data.length, true); var originalDataInputStream = new ByteArrayInputStream(data); - try (var azureInputStream = container.readBlob(blobName)) { + try (var azureInputStream = container.readBlob(OperationPurpose.SNAPSHOT, blobName)) { for (int i = 0; i < data.length; i++) { assertThat(originalDataInputStream.read(), is(equalTo(azureInputStream.read()))); } @@ -288,7 +292,7 @@ public void testReadByteByByte() throws Exception { assertThat(azureInputStream.read(), is(equalTo(-1))); assertThat(originalDataInputStream.read(), is(equalTo(-1))); } - container.delete(); + container.delete(OperationPurpose.SNAPSHOT); } } } diff --git 
a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java index 4f2d7eb1afcfb..2cb4476f528b9 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureSettings; import org.elasticsearch.common.settings.Settings; @@ -138,8 +139,14 @@ public void testMultiBlockUpload() throws Exception { PlainActionFuture future = PlainActionFuture.newFuture(); repo.threadPool().generic().execute(ActionRunnable.run(future, () -> { final BlobContainer blobContainer = repo.blobStore().blobContainer(repo.basePath().add("large_write")); - blobContainer.writeBlob(UUIDs.base64UUID(), new ByteArrayInputStream(randomByteArrayOfLength(blobSize)), blobSize, false); - blobContainer.delete(); + blobContainer.writeBlob( + OperationPurpose.SNAPSHOT, + UUIDs.base64UUID(), + new ByteArrayInputStream(randomByteArrayOfLength(blobSize)), + blobSize, + false + ); + blobContainer.delete(OperationPurpose.SNAPSHOT); })); future.get(); } diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java index b4e6039aae1e4..fbf57a0198644 100644 --- 
a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.BlobMetadata; @@ -45,15 +46,16 @@ public class AzureBlobContainer extends AbstractBlobContainer { } @Override - public boolean blobExists(String blobName) throws IOException { + public boolean blobExists(OperationPurpose purpose, String blobName) throws IOException { logger.trace("blobExists({})", blobName); return blobStore.blobExists(buildKey(blobName)); } - private InputStream openInputStream(String blobName, long position, @Nullable Long length) throws IOException { + private InputStream openInputStream(OperationPurpose purpose, String blobName, long position, @Nullable Long length) + throws IOException { String blobKey = buildKey(blobName); logger.trace("readBlob({}) from position [{}] with length [{}]", blobName, position, length != null ? 
length : "unlimited"); - if (blobStore.getLocationMode() == LocationMode.SECONDARY_ONLY && blobExists(blobName) == false) { + if (blobStore.getLocationMode() == LocationMode.SECONDARY_ONLY && blobExists(purpose, blobName) == false) { // On Azure, if the location path is a secondary location, and the blob does not // exist, instead of returning immediately from the getInputStream call below // with a 404 StorageException, Azure keeps trying and trying for a long timeout @@ -76,13 +78,13 @@ private InputStream openInputStream(String blobName, long position, @Nullable Lo } @Override - public InputStream readBlob(String blobName) throws IOException { - return openInputStream(blobName, 0L, null); + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + return openInputStream(purpose, blobName, 0L, null); } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { - return openInputStream(blobName, position, length); + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + return openInputStream(purpose, blobName, position, length); } @Override @@ -91,23 +93,26 @@ public long readBlobPreferredLength() { } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { logger.trace("writeBlob({}, stream, {})", buildKey(blobName), blobSize); blobStore.writeBlob(buildKey(blobName), inputStream, blobSize, failIfAlreadyExists); } @Override - public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { - writeBlob(blobName, bytes, failIfAlreadyExists); + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference 
bytes, boolean failIfAlreadyExists) + throws IOException { + writeBlob(purpose, blobName, bytes, failIfAlreadyExists); } @Override - public void writeBlob(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { blobStore.writeBlob(buildKey(blobName), bytes, failIfAlreadyExists); } @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -117,13 +122,13 @@ public void writeMetadataBlob( } @Override - public DeleteResult delete() throws IOException { + public DeleteResult delete(OperationPurpose purpose) throws IOException { return blobStore.deleteBlobDirectory(keyPath); } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { - blobStore.deleteBlobsIgnoringIfNotExists(new Iterator<>() { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + blobStore.deleteBlobsIgnoringIfNotExists(purpose, new Iterator<>() { @Override public boolean hasNext() { return blobNames.hasNext(); @@ -137,19 +142,19 @@ public String next() { } @Override - public Map listBlobsByPrefix(@Nullable String prefix) throws IOException { + public Map listBlobsByPrefix(OperationPurpose purpose, @Nullable String prefix) throws IOException { logger.trace("listBlobsByPrefix({})", prefix); return blobStore.listBlobsByPrefix(keyPath, prefix); } @Override - public Map listBlobs() throws IOException { + public Map listBlobs(OperationPurpose purpose) throws IOException { logger.trace("listBlobs()"); - return listBlobsByPrefix(null); + return listBlobsByPrefix(purpose, null); } @Override - public Map children() throws IOException { + public Map children(OperationPurpose purpose) throws IOException { final BlobPath path = path(); return blobStore.children(path); } @@ 
-175,13 +180,14 @@ private boolean skipIfNotPrimaryOnlyLocationMode(ActionListener listener) { } @Override - public void getRegister(String key, ActionListener listener) { + public void getRegister(OperationPurpose purpose, String key, ActionListener listener) { if (skipRegisterOperation(listener)) return; ActionListener.completeWith(listener, () -> blobStore.getRegister(buildKey(key), keyPath, key)); } @Override public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index 70789c5568fbb..d5e510dd5288a 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.BlobContainerUtils; import org.elasticsearch.common.blobstore.support.BlobMetadata; @@ -264,7 +265,7 @@ private static void filterDeleteExceptionsAndRethrow(Exception e, IOException ex } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobs) throws IOException { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobs) throws IOException { if (blobs.hasNext() == false) { return; } diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java 
b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java index 4c9a3b1e69919..f5c1d0b8ac00b 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; @@ -60,11 +61,11 @@ public void testReadNonexistentBlobThrowsNoSuchFileException() { final BlobContainer blobContainer = createBlobContainer(between(1, 5)); final Exception exception = expectThrows(NoSuchFileException.class, () -> { if (randomBoolean()) { - blobContainer.readBlob("read_nonexistent_blob"); + blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob"); } else { final long position = randomLongBetween(0, MAX_RANGE_VAL - 1L); final long length = randomLongBetween(1, MAX_RANGE_VAL - position); - blobContainer.readBlob("read_nonexistent_blob", position, length); + blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob", position, length); } }); assertThat(exception.toString(), exception.getMessage().toLowerCase(Locale.ROOT), containsString("not found")); @@ -111,7 +112,7 @@ public void testReadBlobWithRetries() throws Exception { }); final BlobContainer blobContainer = createBlobContainer(maxRetries); - try (InputStream inputStream = blobContainer.readBlob("read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_max_retries")) { assertArrayEquals(bytes, 
BytesReference.toBytes(Streams.readFully(inputStream))); assertThat(countDownHead.isCountedDown(), is(true)); assertThat(countDownGet.isCountedDown(), is(true)); @@ -159,7 +160,7 @@ public void testReadRangeBlobWithRetries() throws Exception { final BlobContainer blobContainer = createBlobContainer(maxRetries); final int position = randomIntBetween(0, bytes.length - 1); final int length = randomIntBetween(1, bytes.length - position); - try (InputStream inputStream = blobContainer.readBlob("read_range_blob_max_retries", position, length)) { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_range_blob_max_retries", position, length)) { final byte[] bytesRead = BytesReference.toBytes(Streams.readFully(inputStream)); assertArrayEquals(Arrays.copyOfRange(bytes, position, Math.min(bytes.length, position + length)), bytesRead); assertThat(countDownGet.isCountedDown(), is(true)); @@ -202,7 +203,7 @@ public void testWriteBlobWithRetries() throws Exception { final BlobContainer blobContainer = createBlobContainer(maxRetries); try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { - blobContainer.writeBlob("write_blob_max_retries", stream, bytes.length, false); + blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_max_retries", stream, bytes.length, false); } assertThat(countDown.isCountedDown(), is(true)); } @@ -272,7 +273,7 @@ public void testWriteLargeBlob() throws Exception { final BlobContainer blobContainer = createBlobContainer(maxRetries); try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", data), data.length)) { - blobContainer.writeBlob("write_large_blob", stream, data.length, false); + blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_large_blob", stream, data.length, false); } assertThat(countDownUploads.get(), equalTo(0)); @@ -340,7 +341,7 @@ public void testWriteLargeBlobStreaming() throws Exception { }); final 
BlobContainer blobContainer = createBlobContainer(maxRetries); - blobContainer.writeMetadataBlob("write_large_blob_streaming", false, randomBoolean(), out -> { + blobContainer.writeMetadataBlob(OperationPurpose.SNAPSHOT, "write_large_blob_streaming", false, randomBoolean(), out -> { int outstanding = data.length; while (outstanding > 0) { if (randomBoolean()) { @@ -390,7 +391,13 @@ public void reset() {} }) { final IOException ioe = expectThrows( IOException.class, - () -> blobContainer.writeBlob("write_blob_max_retries", stream, randomIntBetween(1, 128), randomBoolean()) + () -> blobContainer.writeBlob( + OperationPurpose.SNAPSHOT, + "write_blob_max_retries", + stream, + randomIntBetween(1, 128), + randomBoolean() + ) ); assertThat(ioe.getMessage(), is("Unable to write blob write_blob_max_retries")); // The mock http server uses 1 thread to process the requests, it's possible that the @@ -464,7 +471,7 @@ public void testRetryFromSecondaryLocationPolicies() throws Exception { } final BlobContainer blobContainer = createBlobContainer(maxRetries, secondaryHost, locationMode); - try (InputStream inputStream = blobContainer.readBlob("read_blob_from_secondary")) { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_from_secondary")) { assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); // It does round robin, first tries on the primary, then on the secondary diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSasTokenTests.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSasTokenTests.java index 8d473e1d9ba57..cfc4e17949771 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSasTokenTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSasTokenTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.UUIDs; import 
org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.MockSecureSettings; @@ -76,7 +77,7 @@ public void testSasTokenIsUsedAsProvidedInSettings() throws Exception { }); final BlobContainer blobContainer = createBlobContainer(maxRetries, null, LocationMode.PRIMARY_ONLY, clientName, secureSettings); - try (InputStream inputStream = blobContainer.readBlob("sas_test")) { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "sas_test")) { assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); } } diff --git a/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 8a4fb8eb41bd3..b0eafb3bc37ab 100644 --- a/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.Streams; @@ -131,7 +132,7 @@ public void testDeleteSingleItem() { f, () -> repository.blobStore() .blobContainer(repository.basePath()) - .deleteBlobsIgnoringIfNotExists(Iterators.single("foo")) + .deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, 
Iterators.single("foo")) ) ) ); @@ -197,7 +198,7 @@ public void testWriteReadLarge() throws IOException { random().nextBytes(data); writeBlob(container, "foobar", new BytesArray(data), false); } - try (InputStream stream = container.readBlob("foobar")) { + try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, "foobar")) { BytesRefBuilder target = new BytesRefBuilder(); while (target.length() < data.length) { byte[] buffer = new byte[scaledRandomIntBetween(1, data.length - target.length())]; @@ -208,7 +209,7 @@ public void testWriteReadLarge() throws IOException { assertEquals(data.length, target.length()); assertArrayEquals(data, Arrays.copyOfRange(target.bytes(), 0, target.length())); } - container.delete(); + container.delete(OperationPurpose.SNAPSHOT); } } diff --git a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java index a5d1b8bb04b9a..9e587eb4dc543 100644 --- a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java +++ b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.BlobMetadata; @@ -37,7 +38,7 @@ class GoogleCloudStorageBlobContainer extends AbstractBlobContainer { } @Override - public boolean blobExists(String blobName) { + public boolean blobExists(OperationPurpose purpose, String blobName) { try { return 
blobStore.blobExists(buildKey(blobName)); } catch (Exception e) { @@ -46,42 +47,45 @@ public boolean blobExists(String blobName) { } @Override - public Map listBlobs() throws IOException { + public Map listBlobs(OperationPurpose purpose) throws IOException { return blobStore.listBlobs(path); } @Override - public Map children() throws IOException { + public Map children(OperationPurpose purpose) throws IOException { return blobStore.listChildren(path()); } @Override - public Map listBlobsByPrefix(String prefix) throws IOException { + public Map listBlobsByPrefix(OperationPurpose purpose, String prefix) throws IOException { return blobStore.listBlobsByPrefix(path, prefix); } @Override - public InputStream readBlob(String blobName) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { return blobStore.readBlob(buildKey(blobName)); } @Override - public InputStream readBlob(final String blobName, final long position, final long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, final String blobName, final long position, final long length) + throws IOException { return blobStore.readBlob(buildKey(blobName), position, length); } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { blobStore.writeBlob(buildKey(blobName), inputStream, blobSize, failIfAlreadyExists); } @Override - public void writeBlob(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { blobStore.writeBlob(buildKey(blobName), bytes, failIfAlreadyExists); } @Override public void writeMetadataBlob( + 
OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -91,18 +95,19 @@ public void writeMetadataBlob( } @Override - public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { - writeBlob(blobName, bytes, failIfAlreadyExists); + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + writeBlob(purpose, blobName, bytes, failIfAlreadyExists); } @Override - public DeleteResult delete() throws IOException { - return blobStore.deleteDirectory(path().buildAsString()); + public DeleteResult delete(OperationPurpose purpose) throws IOException { + return blobStore.deleteDirectory(purpose, path().buildAsString()); } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { - blobStore.deleteBlobsIgnoringIfNotExists(new Iterator<>() { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + blobStore.deleteBlobsIgnoringIfNotExists(purpose, new Iterator<>() { @Override public boolean hasNext() { return blobNames.hasNext(); @@ -122,6 +127,7 @@ private String buildKey(String blobName) { @Override public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, @@ -132,7 +138,7 @@ public void compareAndExchangeRegister( } @Override - public void getRegister(String key, ActionListener listener) { + public void getRegister(OperationPurpose purpose, String key, ActionListener listener) { if (skipCas(listener)) return; ActionListener.completeWith(listener, () -> blobStore.getRegister(buildKey(key), path, key)); } diff --git a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 
76fade3c5afae..51d26a169ad0e 100644 --- a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.BlobContainerUtils; import org.elasticsearch.common.blobstore.support.BlobMetadata; @@ -488,9 +489,10 @@ private void writeBlobMultipart(BlobInfo blobInfo, byte[] buffer, int offset, in /** * Deletes the given path and all its children. * + * @param purpose The purpose of the delete operation * @param pathStr Name of path to delete */ - DeleteResult deleteDirectory(String pathStr) throws IOException { + DeleteResult deleteDirectory(OperationPurpose purpose, String pathStr) throws IOException { return SocketAccess.doPrivilegedIOException(() -> { DeleteResult deleteResult = DeleteResult.ZERO; Page page = client().list(bucketName, BlobListOption.prefix(pathStr)); @@ -498,7 +500,7 @@ DeleteResult deleteDirectory(String pathStr) throws IOException { final AtomicLong blobsDeleted = new AtomicLong(0L); final AtomicLong bytesDeleted = new AtomicLong(0L); final Iterator blobs = page.getValues().iterator(); - deleteBlobsIgnoringIfNotExists(new Iterator<>() { + deleteBlobsIgnoringIfNotExists(purpose, new Iterator<>() { @Override public boolean hasNext() { return blobs.hasNext(); @@ -522,10 +524,11 @@ public String next() { /** * Deletes multiple blobs from the specific bucket using a batch request * + * @param purpose the purpose of the delete operation * @param blobNames names of the blobs to delete */ @Override - public void deleteBlobsIgnoringIfNotExists(Iterator 
blobNames) throws IOException { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { if (blobNames.hasNext() == false) { return; } diff --git a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java index ef38da7eb20ea..d23d9385ab1a2 100644 --- a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java +++ b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; @@ -187,7 +188,7 @@ public void testReadLargeBlobWithRetries() throws Exception { exchange.close(); }); - try (InputStream inputStream = blobContainer.readBlob("large_blob_retries")) { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "large_blob_retries")) { assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); } } @@ -230,7 +231,7 @@ public void testWriteBlobWithRetries() throws Exception { })); try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { - blobContainer.writeBlob("write_blob_max_retries", stream, bytes.length, false); + blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_max_retries", stream, bytes.length, false); } assertThat(countDown.isCountedDown(), is(true)); } @@ -253,7 +254,7 @@ public void testWriteBlobWithReadTimeouts() { 
Exception exception = expectThrows(StorageException.class, () -> { try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { - blobContainer.writeBlob("write_blob_timeout", stream, bytes.length, false); + blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_timeout", stream, bytes.length, false); } }); assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); @@ -391,10 +392,10 @@ public void testWriteLargeBlob() throws IOException { if (randomBoolean()) { try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", data), data.length)) { - blobContainer.writeBlob("write_large_blob", stream, data.length, false); + blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_large_blob", stream, data.length, false); } } else { - blobContainer.writeMetadataBlob("write_large_blob", false, randomBoolean(), out -> out.write(data)); + blobContainer.writeMetadataBlob(OperationPurpose.SNAPSHOT, "write_large_blob", false, randomBoolean(), out -> out.write(data)); } assertThat(countInits.get(), equalTo(0)); @@ -451,7 +452,7 @@ public String next() { exchange.getResponseBody().write(response); })); - blobContainer.deleteBlobsIgnoringIfNotExists(blobNamesIterator); + blobContainer.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNamesIterator); // Ensure that the remaining deletes are sent in the last batch if (pendingDeletes.get() > 0) { diff --git a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java index 5386cad8dce9b..e38347ad30292 100644 --- a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java +++ 
b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.test.ESTestCase; @@ -90,7 +91,10 @@ public void testDeleteBlobsIgnoringIfNotExistsThrowsIOException() throws Excepti ) { final BlobContainer container = store.blobContainer(BlobPath.EMPTY); - IOException e = expectThrows(IOException.class, () -> container.deleteBlobsIgnoringIfNotExists(blobs.iterator())); + IOException e = expectThrows( + IOException.class, + () -> container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobs.iterator()) + ); assertThat(e.getCause(), instanceOf(StorageException.class)); } } diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 184b487d26e1b..d7294cab93844 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -22,6 +22,8 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.MockSecureSettings; @@ -54,6 +56,7 @@ import 
org.elasticsearch.xcontent.XContentFactory; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -64,6 +67,7 @@ import java.util.stream.StreamSupport; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -202,6 +206,31 @@ public void testAbortRequestStats() throws Exception { assertEquals(assertionErrorMsg, mockCalls, sdkRequestCounts); } + public void testRequestStatsWithOperationPurposes() throws IOException { + // The operationPurpose parameter is added but not yet used. This test asserts the new parameter does not change + // the existing stats collection. + final String repoName = createRepository(randomRepositoryName()); + final RepositoriesService repositoriesService = internalCluster().getCurrentMasterNodeInstance(RepositoriesService.class); + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repoName); + final BlobStore blobStore = repository.blobStore(); + + final BlobPath blobPath = repository.basePath().add(randomAlphaOfLength(10)); + final BlobContainer blobContainer = blobStore.blobContainer(blobPath); + final OperationPurpose purpose = randomFrom(OperationPurpose.values()); + final BytesArray whatToWrite = new BytesArray(randomByteArrayOfLength(randomIntBetween(100, 1000))); + blobContainer.writeBlob(purpose, "test.txt", whatToWrite, true); + try (InputStream is = blobContainer.readBlob(purpose, "test.txt")) { + is.readAllBytes(); + } + blobContainer.delete(purpose); + + final Map stats = blobStore.stats(); + assertThat( + stats.keySet(), + containsInAnyOrder("GetObject", "ListObjects", "PutObject", "PutMultipartObject", "DeleteObjects", "AbortMultipartObject") + ); + } + public 
void testEnforcedCooldownPeriod() throws IOException { final String repoName = randomRepositoryName(); createRepository( @@ -243,7 +272,12 @@ public void testEnforcedCooldownPeriod() throws IOException { f, () -> repository.blobStore() .blobContainer(repository.basePath()) - .writeBlobAtomic(BlobStoreRepository.INDEX_FILE_PREFIX + modifiedRepositoryData.getGenId(), serialized, true) + .writeBlobAtomic( + OperationPurpose.SNAPSHOT, + BlobStoreRepository.INDEX_FILE_PREFIX + modifiedRepositoryData.getGenId(), + serialized, + true + ) ) ) ); diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java index c06a8580d845b..87613b0e8f6a1 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -126,7 +127,7 @@ public long absoluteTimeInMillis() { class TestHarness { boolean tryCompareAndSet(BytesReference expected, BytesReference updated) { return PlainActionFuture.get( - future -> blobContainer.compareAndSetRegister("key", expected, updated, future), + future -> blobContainer.compareAndSetRegister(OperationPurpose.SNAPSHOT, "key", expected, updated, future), 10, TimeUnit.SECONDS ); @@ -134,7 +135,11 @@ boolean tryCompareAndSet(BytesReference expected, 
BytesReference updated) { BytesReference readRegister() { return PlainActionFuture.get( - future -> blobContainer.getRegister("key", future.map(OptionalBytesReference::bytesReference)), + future -> blobContainer.getRegister( + OperationPurpose.SNAPSHOT, + "key", + future.map(OptionalBytesReference::bytesReference) + ), 10, TimeUnit.SECONDS ); @@ -181,7 +186,7 @@ List listMultipartUploads() { assertThat(testHarness.listMultipartUploads(), hasSize(0)); assertEquals(bytes2, testHarness.readRegister()); } finally { - blobContainer.delete(); + blobContainer.delete(OperationPurpose.SNAPSHOT); } } finally { ThreadPool.terminate(threadpool, 10, TimeUnit.SECONDS); diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 86650bc0fe9c2..c7dee4f1599c5 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.BlobContainerUtils; @@ -87,7 +88,7 @@ class S3BlobContainer extends AbstractBlobContainer { } @Override - public boolean blobExists(String blobName) { + public boolean blobExists(OperationPurpose purpose, String blobName) { try (AmazonS3Reference clientReference = blobStore.clientReference()) { return SocketAccess.doPrivileged(() -> clientReference.client().doesObjectExist(blobStore.bucket(), buildKey(blobName))); } catch (final Exception e) { @@ -96,12 +97,12 
@@ public boolean blobExists(String blobName) { } @Override - public InputStream readBlob(String blobName) throws IOException { - return new S3RetryingInputStream(blobStore, buildKey(blobName)); + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + return new S3RetryingInputStream(purpose, blobStore, buildKey(blobName)); } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { if (position < 0L) { throw new IllegalArgumentException("position must be non-negative"); } @@ -111,7 +112,7 @@ public InputStream readBlob(String blobName, long position, long length) throws if (length == 0) { return new ByteArrayInputStream(new byte[0]); } else { - return new S3RetryingInputStream(blobStore, buildKey(blobName), position, Math.addExact(position, length - 1)); + return new S3RetryingInputStream(purpose, blobStore, buildKey(blobName), position, Math.addExact(position, length - 1)); } } @@ -125,13 +126,14 @@ public long readBlobPreferredLength() { * This implementation ignores the failIfAlreadyExists flag as the S3 API has no way to enforce this due to its weak consistency model. 
*/ @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { assert inputStream.markSupported() : "No mark support on inputStream breaks the S3 SDK's ability to retry requests"; SocketAccess.doPrivilegedIOException(() -> { if (blobSize <= getLargeBlobThresholdInBytes()) { - executeSingleUpload(blobStore, buildKey(blobName), inputStream, blobSize); + executeSingleUpload(purpose, blobStore, buildKey(blobName), inputStream, blobSize); } else { - executeMultipartUpload(blobStore, buildKey(blobName), inputStream, blobSize); + executeMultipartUpload(purpose, blobStore, buildKey(blobName), inputStream, blobSize); } return null; }); @@ -139,6 +141,7 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -165,7 +168,7 @@ private void flushBuffer(boolean lastPart) throws IOException { uploadId.set( SocketAccess.doPrivileged( () -> clientReference.client() - .initiateMultipartUpload(initiateMultiPartUpload(absoluteBlobKey)) + .initiateMultipartUpload(initiateMultiPartUpload(purpose, absoluteBlobKey)) .getUploadId() ) ); @@ -175,6 +178,7 @@ private void flushBuffer(boolean lastPart) throws IOException { } assert lastPart == false || successful : "must only write last part if successful"; final UploadPartRequest uploadRequest = createPartUploadRequest( + purpose, buffer.bytes().streamInput(), uploadId.get(), parts.size() + 1, @@ -191,7 +195,7 @@ private void flushBuffer(boolean lastPart) throws IOException { @Override protected void onCompletion() throws IOException { if (flushedBytes == 0L) { - writeBlob(blobName, buffer.bytes(), failIfAlreadyExists); + writeBlob(purpose, 
blobName, buffer.bytes(), failIfAlreadyExists); } else { flushBuffer(true); final CompleteMultipartUploadRequest complRequest = new CompleteMultipartUploadRequest( @@ -208,7 +212,7 @@ protected void onCompletion() throws IOException { @Override protected void onFailure() { if (Strings.hasText(uploadId.get())) { - abortMultiPartUpload(uploadId.get(), absoluteBlobKey); + abortMultiPartUpload(purpose, uploadId.get(), absoluteBlobKey); } } } @@ -219,6 +223,7 @@ protected void onFailure() { } private UploadPartRequest createPartUploadRequest( + OperationPurpose purpose, InputStream stream, String uploadId, int number, @@ -238,7 +243,7 @@ private UploadPartRequest createPartUploadRequest( return uploadRequest; } - private void abortMultiPartUpload(String uploadId, String blobName) { + private void abortMultiPartUpload(OperationPurpose purpose, String uploadId, String blobName) { final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(blobStore.bucket(), blobName, uploadId); abortRequest.setRequestMetricCollector(blobStore.abortPartUploadMetricCollector); try (AmazonS3Reference clientReference = blobStore.clientReference()) { @@ -246,7 +251,7 @@ private void abortMultiPartUpload(String uploadId, String blobName) { } } - private InitiateMultipartUploadRequest initiateMultiPartUpload(String blobName) { + private InitiateMultipartUploadRequest initiateMultiPartUpload(OperationPurpose purpose, String blobName) { final InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(blobStore.bucket(), blobName); initRequest.setStorageClass(blobStore.getStorageClass()); initRequest.setCannedACL(blobStore.getCannedACL()); @@ -265,12 +270,13 @@ long getLargeBlobThresholdInBytes() { } @Override - public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { - writeBlob(blobName, bytes, failIfAlreadyExists); + public void writeBlobAtomic(OperationPurpose purpose, String blobName, 
BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + writeBlob(purpose, blobName, bytes, failIfAlreadyExists); } @Override - public DeleteResult delete() throws IOException { + public DeleteResult delete(OperationPurpose purpose) throws IOException { final AtomicLong deletedBlobs = new AtomicLong(); final AtomicLong deletedBytes = new AtomicLong(); try (AmazonS3Reference clientReference = blobStore.clientReference()) { @@ -294,10 +300,10 @@ public DeleteResult delete() throws IOException { return summary.getKey(); }); if (list.isTruncated()) { - blobStore.deleteBlobsIgnoringIfNotExists(blobNameIterator); + blobStore.deleteBlobsIgnoringIfNotExists(purpose, blobNameIterator); prevListing = list; } else { - blobStore.deleteBlobsIgnoringIfNotExists(Iterators.concat(blobNameIterator, Iterators.single(keyPath))); + blobStore.deleteBlobsIgnoringIfNotExists(purpose, Iterators.concat(blobNameIterator, Iterators.single(keyPath))); break; } } @@ -308,14 +314,18 @@ public DeleteResult delete() throws IOException { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { - blobStore.deleteBlobsIgnoringIfNotExists(Iterators.map(blobNames, this::buildKey)); + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + blobStore.deleteBlobsIgnoringIfNotExists(purpose, Iterators.map(blobNames, this::buildKey)); } @Override - public Map listBlobsByPrefix(@Nullable String blobNamePrefix) throws IOException { + public Map listBlobsByPrefix(OperationPurpose purpose, @Nullable String blobNamePrefix) throws IOException { try (AmazonS3Reference clientReference = blobStore.clientReference()) { - return executeListing(clientReference, listObjectsRequest(blobNamePrefix == null ? keyPath : buildKey(blobNamePrefix))).stream() + return executeListing( + purpose, + clientReference, + listObjectsRequest(purpose, blobNamePrefix == null ? 
keyPath : buildKey(blobNamePrefix)) + ).stream() .flatMap(listing -> listing.getObjectSummaries().stream()) .map(summary -> new BlobMetadata(summary.getKey().substring(keyPath.length()), summary.getSize())) .collect(Collectors.toMap(BlobMetadata::name, Function.identity())); @@ -325,14 +335,14 @@ public Map listBlobsByPrefix(@Nullable String blobNamePref } @Override - public Map listBlobs() throws IOException { - return listBlobsByPrefix(null); + public Map listBlobs(OperationPurpose purpose) throws IOException { + return listBlobsByPrefix(purpose, null); } @Override - public Map children() throws IOException { + public Map children(OperationPurpose purpose) throws IOException { try (AmazonS3Reference clientReference = blobStore.clientReference()) { - return executeListing(clientReference, listObjectsRequest(keyPath)).stream().flatMap(listing -> { + return executeListing(purpose, clientReference, listObjectsRequest(purpose, keyPath)).stream().flatMap(listing -> { assert listing.getObjectSummaries().stream().noneMatch(s -> { for (String commonPrefix : listing.getCommonPrefixes()) { if (s.getKey().substring(keyPath.length()).startsWith(commonPrefix)) { @@ -353,7 +363,11 @@ public Map children() throws IOException { } } - private List executeListing(AmazonS3Reference clientReference, ListObjectsRequest listObjectsRequest) { + private List executeListing( + OperationPurpose purpose, + AmazonS3Reference clientReference, + ListObjectsRequest listObjectsRequest + ) { final List results = new ArrayList<>(); ObjectListing prevListing = null; while (true) { @@ -375,7 +389,7 @@ private List executeListing(AmazonS3Reference clientReference, Li return results; } - private ListObjectsRequest listObjectsRequest(String pathPrefix) { + private ListObjectsRequest listObjectsRequest(OperationPurpose purpose, String pathPrefix) { return new ListObjectsRequest().withBucketName(blobStore.bucket()) .withPrefix(pathPrefix) .withDelimiter("/") @@ -390,8 +404,13 @@ String buildKey(String 
blobName) { /** * Uploads a blob using a single upload request */ - void executeSingleUpload(final S3BlobStore s3BlobStore, final String blobName, final InputStream input, final long blobSize) - throws IOException { + void executeSingleUpload( + OperationPurpose purpose, + final S3BlobStore s3BlobStore, + final String blobName, + final InputStream input, + final long blobSize + ) throws IOException { // Extra safety checks if (blobSize > MAX_FILE_SIZE.getBytes()) { @@ -421,8 +440,13 @@ void executeSingleUpload(final S3BlobStore s3BlobStore, final String blobName, f /** * Uploads a blob using multipart upload requests. */ - void executeMultipartUpload(final S3BlobStore s3BlobStore, final String blobName, final InputStream input, final long blobSize) - throws IOException { + void executeMultipartUpload( + OperationPurpose purpose, + final S3BlobStore s3BlobStore, + final String blobName, + final InputStream input, + final long blobSize + ) throws IOException { ensureMultiPartUploadSize(blobSize); final long partSize = s3BlobStore.bufferSizeInBytes(); @@ -443,7 +467,7 @@ void executeMultipartUpload(final S3BlobStore s3BlobStore, final String blobName uploadId.set( SocketAccess.doPrivileged( - () -> clientReference.client().initiateMultipartUpload(initiateMultiPartUpload(blobName)).getUploadId() + () -> clientReference.client().initiateMultipartUpload(initiateMultiPartUpload(purpose, blobName)).getUploadId() ) ); if (Strings.isEmpty(uploadId.get())) { @@ -456,6 +480,7 @@ void executeMultipartUpload(final S3BlobStore s3BlobStore, final String blobName for (int i = 1; i <= nbParts; i++) { final boolean lastPart = i == nbParts; final UploadPartRequest uploadRequest = createPartUploadRequest( + purpose, input, uploadId.get(), i, @@ -489,7 +514,7 @@ void executeMultipartUpload(final S3BlobStore s3BlobStore, final String blobName throw new IOException("Unable to upload object [" + blobName + "] using multipart upload", e); } finally { if ((success == false) && 
Strings.hasLength(uploadId.get())) { - abortMultiPartUpload(uploadId.get(), blobName); + abortMultiPartUpload(purpose, uploadId.get(), blobName); } } } @@ -538,13 +563,15 @@ static Tuple numberOfMultiparts(final long totalSize, final long par private class CompareAndExchangeOperation { + private final OperationPurpose purpose; private final AmazonS3 client; private final String bucket; private final String rawKey; private final String blobKey; private final ThreadPool threadPool; - CompareAndExchangeOperation(AmazonS3 client, String bucket, String key, ThreadPool threadPool) { + CompareAndExchangeOperation(OperationPurpose purpose, AmazonS3 client, String bucket, String key, ThreadPool threadPool) { + this.purpose = purpose; this.client = client; this.bucket = bucket; this.rawKey = key; @@ -678,6 +705,7 @@ void run(BytesReference expected, BytesReference updated, ActionListener getRegister( + purpose, rawKey, delegate1.delegateFailure((delegate2, currentValue) -> ActionListener.completeWith(delegate2, () -> { if (currentValue.isPresent() && currentValue.bytesReference().equals(expected)) { @@ -760,6 +788,7 @@ private void abortMultipartUploadIfExists(String uploadId) { @Override public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, @@ -774,7 +803,7 @@ public void compareAndExchangeRegister( delegate.onFailure(e); } }), clientReference), - l -> new CompareAndExchangeOperation(clientReference.client(), blobStore.bucket(), key, blobStore.getThreadPool()).run( + l -> new CompareAndExchangeOperation(purpose, clientReference.client(), blobStore.bucket(), key, blobStore.getThreadPool()).run( expected, updated, l @@ -783,7 +812,7 @@ public void compareAndExchangeRegister( } @Override - public void getRegister(String key, ActionListener listener) { + public void getRegister(OperationPurpose purpose, String key, ActionListener listener) { ActionListener.completeWith(listener, () -> { final var 
getObjectRequest = new GetObjectRequest(blobStore.bucket(), buildKey(key)); getObjectRequest.setRequestMetricCollector(blobStore.getMetricCollector); diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 027fd03d83c55..fca005e8de32c 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStoreException; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.TimeValue; @@ -214,7 +215,7 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { if (blobNames.hasNext() == false) { return; } @@ -227,12 +228,12 @@ public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IO blobNames.forEachRemaining(key -> { partition.add(key); if (partition.size() == MAX_BULK_DELETES) { - deletePartition(clientReference, partition, aex); + deletePartition(purpose, clientReference, partition, aex); partition.clear(); } }); if (partition.isEmpty() == false) { - deletePartition(clientReference, partition, aex); + deletePartition(purpose, clientReference, partition, aex); } }); if (aex.get() != null) { @@ -243,9 +244,14 @@ public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IO } } - private void deletePartition(AmazonS3Reference clientReference, List partition, AtomicReference aex) { + 
private void deletePartition( + OperationPurpose purpose, + AmazonS3Reference clientReference, + List partition, + AtomicReference aex + ) { try { - clientReference.client().deleteObjects(bulkDelete(this, partition)); + clientReference.client().deleteObjects(bulkDelete(purpose, this, partition)); } catch (MultiObjectDeleteException e) { // We are sending quiet mode requests so we can't use the deleted keys entry on the exception and instead // first remove all keys that were sent in the request and then add back those that ran into an exception. @@ -264,7 +270,7 @@ private void deletePartition(AmazonS3Reference clientReference, List par } } - private static DeleteObjectsRequest bulkDelete(S3BlobStore blobStore, List blobs) { + private static DeleteObjectsRequest bulkDelete(OperationPurpose purpose, S3BlobStore blobStore, List blobs) { return new DeleteObjectsRequest(blobStore.bucket()).withKeys(blobs.toArray(Strings.EMPTY_ARRAY)) .withQuiet(true) .withRequestMetricCollector(blobStore.deleteMetricCollector); diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 7885e36c3c295..d796eb49e7bcb 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -18,6 +18,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.Version; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.core.IOUtils; import java.io.IOException; @@ -41,6 +42,7 @@ class S3RetryingInputStream extends InputStream { static final int MAX_SUPPRESSED_EXCEPTIONS = 10; + private final OperationPurpose purpose; private final S3BlobStore blobStore; private final String blobKey; private final long start; 
@@ -56,18 +58,19 @@ class S3RetryingInputStream extends InputStream { private boolean closed; private boolean eof; - S3RetryingInputStream(S3BlobStore blobStore, String blobKey) throws IOException { - this(blobStore, blobKey, 0, Long.MAX_VALUE - 1); + S3RetryingInputStream(OperationPurpose purpose, S3BlobStore blobStore, String blobKey) throws IOException { + this(purpose, blobStore, blobKey, 0, Long.MAX_VALUE - 1); } // both start and end are inclusive bounds, following the definition in GetObjectRequest.setRange - S3RetryingInputStream(S3BlobStore blobStore, String blobKey, long start, long end) throws IOException { + S3RetryingInputStream(OperationPurpose purpose, S3BlobStore blobStore, String blobKey, long start, long end) throws IOException { if (start < 0L) { throw new IllegalArgumentException("start must be non-negative"); } if (end < start || end == Long.MAX_VALUE) { throw new IllegalArgumentException("end must be >= start and not Long.MAX_VALUE"); } + this.purpose = purpose; this.blobStore = blobStore; this.blobKey = blobKey; this.failures = new ArrayList<>(MAX_SUPPRESSED_EXCEPTIONS); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index af462f0f4c723..b0f443964e03a 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import 
org.elasticsearch.common.lucene.store.ByteArrayIndexInput; @@ -159,13 +160,13 @@ protected BlobContainer createBlobContainer( ) ) { @Override - public InputStream readBlob(String blobName) throws IOException { - return new AssertingInputStream(super.readBlob(blobName), blobName); + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + return new AssertingInputStream(super.readBlob(purpose, blobName), blobName); } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { - return new AssertingInputStream(super.readBlob(blobName, position, length), blobName, position, length); + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + return new AssertingInputStream(super.readBlob(purpose, blobName, position, length), blobName, position, length); } }; } @@ -213,7 +214,7 @@ public void testWriteBlobWithRetries() throws Exception { } }); try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { - blobContainer.writeBlob("write_blob_max_retries", stream, bytes.length, false); + blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_max_retries", stream, bytes.length, false); } assertThat(countDown.isCountedDown(), is(true)); } @@ -236,7 +237,7 @@ public void testWriteBlobWithReadTimeouts() { Exception exception = expectThrows(IOException.class, () -> { try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { - blobContainer.writeBlob("write_blob_timeout", stream, bytes.length, false); + blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_timeout", stream, bytes.length, false); } }); assertThat( @@ -342,7 +343,7 @@ public void testWriteLargeBlob() throws Exception { } }); - blobContainer.writeBlob("write_large_blob", new ZeroInputStream(blobSize), blobSize, false); + 
blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_large_blob", new ZeroInputStream(blobSize), blobSize, false); assertThat(countDownInitiate.isCountedDown(), is(true)); assertThat(countDownUploads.get(), equalTo(0)); @@ -440,7 +441,7 @@ public void testWriteLargeBlobStreaming() throws Exception { } }); - blobContainer.writeMetadataBlob("write_large_blob_streaming", false, randomBoolean(), out -> { + blobContainer.writeMetadataBlob(OperationPurpose.SNAPSHOT, "write_large_blob_streaming", false, randomBoolean(), out -> { final byte[] buffer = new byte[16 * 1024]; long outstanding = blobSize; while (outstanding > 0) { @@ -515,7 +516,7 @@ public void handle(HttpExchange exchange) throws IOException { httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_max_retries"), new FlakyReadHandler()); - try (InputStream inputStream = blobContainer.readBlob("read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_max_retries")) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java index ade8c8a47be84..9ae2589759d3f 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStoreException; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; @@ -58,7 +59,7 @@ public void testExecuteSingleUploadBlobSizeTooLarge() { final 
IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> blobContainer.executeSingleUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) + () -> blobContainer.executeSingleUpload(OperationPurpose.SNAPSHOT, blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) ); assertEquals("Upload request size [" + blobSize + "] can't be larger than 5gb", e.getMessage()); } @@ -72,7 +73,13 @@ public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() { final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> blobContainer.executeSingleUpload(blobStore, blobName, new ByteArrayInputStream(new byte[0]), ByteSizeUnit.MB.toBytes(2)) + () -> blobContainer.executeSingleUpload( + OperationPurpose.SNAPSHOT, + blobStore, + blobName, + new ByteArrayInputStream(new byte[0]), + ByteSizeUnit.MB.toBytes(2) + ) ); assertEquals("Upload request size [2097152] can't be larger than buffer size", e.getMessage()); } @@ -114,7 +121,7 @@ public void testExecuteSingleUpload() throws IOException { when(client.putObject(argumentCaptor.capture())).thenReturn(new PutObjectResult()); final ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[blobSize]); - blobContainer.executeSingleUpload(blobStore, blobName, inputStream, blobSize); + blobContainer.executeSingleUpload(OperationPurpose.SNAPSHOT, blobStore, blobName, inputStream, blobSize); final PutObjectRequest request = argumentCaptor.getValue(); assertEquals(bucketName, request.getBucketName()); @@ -135,7 +142,13 @@ public void testExecuteMultipartUploadBlobSizeTooLarge() { final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> blobContainer.executeMultipartUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) + () -> blobContainer.executeMultipartUpload( + OperationPurpose.SNAPSHOT, + blobStore, + randomAlphaOfLengthBetween(1, 10), + null, + blobSize + ) ); assertEquals("Multipart upload request size [" + 
blobSize + "] can't be larger than 5tb", e.getMessage()); } @@ -147,7 +160,13 @@ public void testExecuteMultipartUploadBlobSizeTooSmall() { final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> blobContainer.executeMultipartUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) + () -> blobContainer.executeMultipartUpload( + OperationPurpose.SNAPSHOT, + blobStore, + randomAlphaOfLengthBetween(1, 10), + null, + blobSize + ) ); assertEquals("Multipart upload request size [" + blobSize + "] can't be smaller than 5mb", e.getMessage()); } @@ -211,7 +230,7 @@ public void testExecuteMultipartUpload() throws IOException { final ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[0]); final S3BlobContainer blobContainer = new S3BlobContainer(blobPath, blobStore); - blobContainer.executeMultipartUpload(blobStore, blobName, inputStream, blobSize); + blobContainer.executeMultipartUpload(OperationPurpose.SNAPSHOT, blobStore, blobName, inputStream, blobSize); final InitiateMultipartUploadRequest initRequest = initArgCaptor.getValue(); assertEquals(bucketName, initRequest.getBucketName()); @@ -317,7 +336,13 @@ public void testExecuteMultipartUploadAborted() { final IOException e = expectThrows(IOException.class, () -> { final S3BlobContainer blobContainer = new S3BlobContainer(BlobPath.EMPTY, blobStore); - blobContainer.executeMultipartUpload(blobStore, blobName, new ByteArrayInputStream(new byte[0]), blobSize); + blobContainer.executeMultipartUpload( + OperationPurpose.SNAPSHOT, + blobStore, + blobName, + new ByteArrayInputStream(new byte[0]), + blobSize + ); }); assertEquals("Unable to upload object [" + blobName + "] using multipart upload", e.getMessage()); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java index f15e4ada6b609..d8366236a8184 
100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java @@ -14,6 +14,7 @@ import com.amazonaws.services.s3.model.S3ObjectInputStream; import org.apache.http.client.methods.HttpGet; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.io.Streams; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; @@ -93,11 +94,11 @@ private S3RetryingInputStream createInputStream(final byte[] data, @Nullable fin if (position != null && length != null) { s3Object.getObjectMetadata().setContentLength(length); s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(data, position, length), new HttpGet())); - return new S3RetryingInputStream(blobStore, "_blob", position, Math.addExact(position, length - 1)); + return new S3RetryingInputStream(OperationPurpose.SNAPSHOT, blobStore, "_blob", position, Math.addExact(position, length - 1)); } else { s3Object.getObjectMetadata().setContentLength(data.length); s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(data), new HttpGet())); - return new S3RetryingInputStream(blobStore, "_blob"); + return new S3RetryingInputStream(OperationPurpose.SNAPSHOT, blobStore, "_blob"); } } } diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/FileURLBlobContainer.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/FileURLBlobContainer.java index 27fafe39b60cf..7bb732d2c17ed 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/FileURLBlobContainer.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/FileURLBlobContainer.java @@ -9,6 +9,7 @@ package org.elasticsearch.common.blobstore.url; import org.elasticsearch.common.blobstore.BlobPath; +import 
org.elasticsearch.common.blobstore.OperationPurpose; import java.io.InputStream; import java.net.URL; @@ -19,7 +20,7 @@ public FileURLBlobContainer(URLBlobStore blobStore, BlobPath blobPath, URL path) } @Override - public InputStream readBlob(String blobName, long position, long length) { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) { throw new UnsupportedOperationException("URL repository doesn't support this operation. Please use a 'fs' repository instead"); } } diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java index c6e4f2fa6f007..c33e1519e8a28 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.BlobMetadata; @@ -67,7 +68,7 @@ public URL url() { * This operation is not supported by URLBlobContainer */ @Override - public boolean blobExists(String blobName) { + public boolean blobExists(OperationPurpose purpose, String blobName) { assert false : "should never be called for a read-only url repo"; throw new UnsupportedOperationException("URL repository doesn't support this operation"); } @@ -76,12 +77,12 @@ public boolean blobExists(String blobName) { * This operation is not supported by URLBlobContainer */ @Override - public Map listBlobs() throws IOException { + 
public Map listBlobs(OperationPurpose purpose) throws IOException { throw new UnsupportedOperationException("URL repository doesn't support this operation"); } @Override - public Map children() throws IOException { + public Map children(OperationPurpose purpose) throws IOException { throw new UnsupportedOperationException("URL repository doesn't support this operation"); } @@ -89,7 +90,7 @@ public Map children() throws IOException { * This operation is not supported by URLBlobContainer */ @Override - public Map listBlobsByPrefix(String blobNamePrefix) throws IOException { + public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) throws IOException { throw new UnsupportedOperationException("URL repository doesn't support this operation"); } @@ -97,17 +98,17 @@ public Map listBlobsByPrefix(String blobNamePrefix) throws * This operation is not supported by URLBlobContainer */ @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) { throw new UnsupportedOperationException("URL repository is read only"); } @Override - public DeleteResult delete() { + public DeleteResult delete(OperationPurpose purpose) { throw new UnsupportedOperationException("URL repository is read only"); } @Override - public InputStream readBlob(String name) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String name) throws IOException { try { return new BufferedInputStream(getInputStream(new URL(path, name)), blobStore.bufferSizeInBytes()); } catch (FileNotFoundException fnfe) { @@ -116,17 +117,19 @@ public InputStream readBlob(String name) throws IOException { } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { throw new UnsupportedOperationException("URL 
repository doesn't support this operation"); } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { throw new UnsupportedOperationException("URL repository doesn't support this operation"); } @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -136,7 +139,8 @@ public void writeMetadataBlob( } @Override - public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { throw new UnsupportedOperationException("URL repository doesn't support this operation"); } @@ -151,6 +155,7 @@ private static InputStream getInputStream(URL url) throws IOException { @Override public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java index 327a66e94e6dd..0a6a7c22e79e5 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStoreException; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.url.http.HttpURLBlobContainer; import 
org.elasticsearch.common.blobstore.url.http.URLHttpClient; import org.elasticsearch.common.blobstore.url.http.URLHttpClientSettings; @@ -108,7 +109,7 @@ public BlobContainer blobContainer(BlobPath blobPath) { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { throw new UnsupportedOperationException("Bulk deletes are not supported in URL repositories"); } diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/HttpURLBlobContainer.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/HttpURLBlobContainer.java index 106e611151b06..186c119d78230 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/HttpURLBlobContainer.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/http/HttpURLBlobContainer.java @@ -9,6 +9,7 @@ package org.elasticsearch.common.blobstore.url.http; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.url.URLBlobContainer; import org.elasticsearch.common.blobstore.url.URLBlobStore; @@ -35,7 +36,7 @@ public HttpURLBlobContainer( } @Override - public InputStream readBlob(String name, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String name, long position, long length) throws IOException { if (length == 0) { return new ByteArrayInputStream(new byte[0]); } @@ -51,7 +52,7 @@ public InputStream readBlob(String name, long position, long length) throws IOEx } @Override - public InputStream readBlob(String name) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String name) throws IOException { return new RetryingHttpInputStream(name, getURIForBlob(name), httpClient, 
httpClientSettings.getMaxRetries()); } diff --git a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/AbstractURLBlobStoreTests.java b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/AbstractURLBlobStoreTests.java index 88a248453cc98..92cb0c1cf75a2 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/AbstractURLBlobStoreTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/AbstractURLBlobStoreTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; @@ -33,7 +34,7 @@ public void testURLBlobStoreCanReadBlob() throws IOException { BytesArray data = getOriginalData(); String blobName = getBlobName(); BlobContainer container = getBlobContainer(); - try (InputStream stream = container.readBlob(blobName)) { + try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, blobName)) { BytesReference bytesRead = Streams.readFully(stream); assertThat(data, equalTo(bytesRead)); } @@ -45,7 +46,7 @@ public void testURLBlobStoreCanReadBlobRange() throws IOException { BlobContainer container = getBlobContainer(); int position = randomIntBetween(0, data.length() - 1); int length = randomIntBetween(1, data.length() - position); - try (InputStream stream = container.readBlob(blobName, position, length)) { + try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, blobName, position, length)) { BytesReference bytesRead = Streams.readFully(stream); assertThat(data.slice(position, length), equalTo(bytesRead)); } @@ -54,7 +55,7 @@ public void testURLBlobStoreCanReadBlobRange() throws IOException { public void testNoBlobFound() throws IOException { BlobContainer 
container = getBlobContainer(); String incorrectBlobName = UUIDs.base64UUID(); - try (InputStream ignored = container.readBlob(incorrectBlobName)) { + try (InputStream ignored = container.readBlob(OperationPurpose.SNAPSHOT, incorrectBlobName)) { ignored.read(); fail("Should have thrown NoSuchFileException exception"); } catch (NoSuchFileException e) { diff --git a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/FileURLBlobStoreTests.java b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/FileURLBlobStoreTests.java index 3e5d46f08264f..7bc793415c63e 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/FileURLBlobStoreTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/FileURLBlobStoreTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.url.http.URLHttpClient; import org.elasticsearch.common.blobstore.url.http.URLHttpClientSettings; import org.elasticsearch.common.bytes.BytesArray; @@ -59,6 +60,6 @@ String getBlobName() { @Override public void testURLBlobStoreCanReadBlobRange() throws IOException { - expectThrows(UnsupportedOperationException.class, () -> getBlobContainer().readBlob("test", 0, 12)); + expectThrows(UnsupportedOperationException.class, () -> getBlobContainer().readBlob(OperationPurpose.SNAPSHOT, "test", 0, 12)); } } diff --git a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/HttpURLBlobStoreTests.java b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/HttpURLBlobStoreTests.java index 01bbc78a2bf4a..f8d55ecab6ab8 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/HttpURLBlobStoreTests.java +++ 
b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/HttpURLBlobStoreTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.url.http.URLHttpClient; import org.elasticsearch.common.blobstore.url.http.URLHttpClientSettings; import org.elasticsearch.common.bytes.BytesArray; @@ -126,8 +127,14 @@ String getBlobName() { public void testRangeReadOutsideOfLegalRange() { BlobContainer container = getBlobContainer(); - expectThrows(IllegalArgumentException.class, () -> container.readBlob(blobName, -1, content.length).read()); - expectThrows(IOException.class, () -> container.readBlob(blobName, content.length + 1, content.length).read()); + expectThrows( + IllegalArgumentException.class, + () -> container.readBlob(OperationPurpose.SNAPSHOT, blobName, -1, content.length).read() + ); + expectThrows( + IOException.class, + () -> container.readBlob(OperationPurpose.SNAPSHOT, blobName, content.length + 1, content.length).read() + ); } private String getEndpointForServer() { diff --git a/modules/rest-root/src/test/java/org/elasticsearch/rest/root/MainResponseTests.java b/modules/rest-root/src/test/java/org/elasticsearch/rest/root/MainResponseTests.java index 4bd71de9829d5..d7ff672fd5c0e 100644 --- a/modules/rest-root/src/test/java/org/elasticsearch/rest/root/MainResponseTests.java +++ b/modules/rest-root/src/test/java/org/elasticsearch/rest/root/MainResponseTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.AbstractXContentSerializingTestCase; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -35,19 +34,19 @@ protected 
MainResponse createTestInstance() { String clusterUuid = randomAlphaOfLength(10); ClusterName clusterName = new ClusterName(randomAlphaOfLength(10)); String nodeName = randomAlphaOfLength(10); - Version version = VersionUtils.randomCompatibleVersion(random(), Version.CURRENT); + String versionString = randomAlphaOfLength(10); IndexVersion indexVersion = IndexVersionUtils.randomVersion(); Build build = newBuild( Build.current(), Map.of( "version", - version.toString(), + versionString, "minWireCompatVersion", - version.minimumCompatibilityVersion().toString(), + randomAlphaOfLength(10), "minIndexCompatVersion", Build.minimumCompatString(IndexVersion.getMinimumCompatibleIndexVersion(indexVersion.id())), "displayString", - Build.defaultDisplayString(Build.current().type(), Build.current().hash(), Build.current().date(), version.toString()) + Build.defaultDisplayString(Build.current().type(), Build.current().hash(), Build.current().date(), versionString) ) ); return new MainResponse(nodeName, indexVersion.luceneVersion().toString(), clusterName, clusterUuid, build); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 9f5df75fb28f0..fe74ba7a5b0ea 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.fs.FsBlobContainer; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; @@ -72,18 +73,18 
@@ final class HdfsBlobContainer extends AbstractBlobContainer { private static final DeleteResult DELETE_RESULT = new DeleteResult(1L, 0L); @Override - public boolean blobExists(String blobName) throws IOException { + public boolean blobExists(OperationPurpose purpose, String blobName) throws IOException { return store.execute(fileContext -> fileContext.util().exists(new Path(path, blobName))); } @Override - public DeleteResult delete() throws IOException { + public DeleteResult delete(OperationPurpose purpose) throws IOException { store.execute(fileContext -> fileContext.delete(path, true)); return DELETE_RESULT; } @Override - public void deleteBlobsIgnoringIfNotExists(final Iterator blobNames) throws IOException { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, final Iterator blobNames) throws IOException { IOException ioe = null; while (blobNames.hasNext()) { final String blobName = blobNames.next(); @@ -105,7 +106,7 @@ public void deleteBlobsIgnoringIfNotExists(final Iterator blobNames) thr } @Override - public InputStream readBlob(String blobName) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { // FSDataInputStream does buffering internally // FSDataInputStream can open connections on read() or skip() so we wrap in // HDFSPrivilegedInputSteam which will ensure that underlying methods will @@ -120,7 +121,7 @@ public InputStream readBlob(String blobName) throws IOException { } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { // FSDataInputStream does buffering internally // FSDataInputStream can open connections on read() or skip() so we wrap in // HDFSPrivilegedInputSteam which will ensure that underlying methods will @@ -140,7 +141,8 @@ public InputStream readBlob(String blobName, long position, 
long length) throws } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { Path blob = new Path(path, blobName); // we pass CREATE, which means it fails if a blob already exists. final EnumSet flags = failIfAlreadyExists @@ -157,7 +159,7 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } @Override - public void writeBlob(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { Path blob = new Path(path, blobName); // we pass CREATE, which means it fails if a blob already exists. final EnumSet flags = failIfAlreadyExists @@ -175,6 +177,7 @@ public void writeBlob(String blobName, BytesReference bytes, boolean failIfAlrea @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -218,7 +221,8 @@ public void writeMetadataBlob( } @Override - public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { final String tempBlob = FsBlobContainer.tempBlobName(blobName); final Path tempBlobPath = new Path(path, tempBlob); final Path blob = new Path(path, blobName); @@ -259,7 +263,7 @@ private void writeToPath( } @Override - public Map listBlobsByPrefix(@Nullable final String prefix) throws IOException { + public Map listBlobsByPrefix(OperationPurpose purpose, @Nullable final String prefix) throws IOException { FileStatus[] files; try { files = store.execute( 
@@ -278,12 +282,12 @@ public Map listBlobsByPrefix(@Nullable final String prefix } @Override - public Map listBlobs() throws IOException { - return listBlobsByPrefix(null); + public Map listBlobs(OperationPurpose purpose) throws IOException { + return listBlobsByPrefix(purpose, null); } @Override - public Map children() throws IOException { + public Map children(OperationPurpose purpose) throws IOException { FileStatus[] files = store.execute(fileContext -> fileContext.util().listStatus(path)); Map map = new LinkedHashMap<>(); for (FileStatus file : files) { @@ -342,6 +346,7 @@ public synchronized void reset() throws IOException { @Override public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index 1dc246cdeeb66..a7c40980858ad 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import java.io.IOException; import java.util.Iterator; @@ -71,7 +72,7 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { throw new UnsupportedOperationException("Bulk deletes are not supported in Hdfs repositories"); } diff --git 
a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java index 348d392c59a69..6d7aca0ca1d56 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java @@ -20,6 +20,7 @@ import org.apache.hadoop.util.Progressable; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.core.Streams; import org.elasticsearch.core.SuppressForbidden; @@ -130,7 +131,7 @@ public void testReadOnly() throws Exception { byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); writeBlob(container, "foo", new BytesArray(data), randomBoolean()); assertArrayEquals(readBlobFully(container, "foo", data.length), data); - assertTrue(container.blobExists("foo")); + assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "foo")); } public void testReadRange() throws Exception { @@ -161,7 +162,7 @@ public void testReadRange() throws Exception { int pos = randomIntBetween(0, data.length / 2); int len = randomIntBetween(pos, data.length) - pos; assertArrayEquals(readBlobPartially(container, "foo", pos, len), Arrays.copyOfRange(data, pos, pos + len)); - assertTrue(container.blobExists("foo")); + assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "foo")); } public void testReplicationFactor() throws Exception { @@ -208,24 +209,24 @@ public void testListBlobsByPrefix() throws Exception { byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); writeBlob(container, "foo", new BytesArray(data), randomBoolean()); 
assertArrayEquals(readBlobFully(container, "foo", data.length), data); - assertTrue(container.blobExists("foo")); + assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "foo")); writeBlob(container, "bar", new BytesArray(data), randomBoolean()); assertArrayEquals(readBlobFully(container, "bar", data.length), data); - assertTrue(container.blobExists("bar")); + assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "bar")); - assertEquals(2, container.listBlobsByPrefix(null).size()); - assertEquals(1, container.listBlobsByPrefix("fo").size()); - assertEquals(0, container.listBlobsByPrefix("noSuchFile").size()); + assertEquals(2, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, null).size()); + assertEquals(1, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "fo").size()); + assertEquals(0, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "noSuchFile").size()); - container.delete(); - assertEquals(0, container.listBlobsByPrefix(null).size()); - assertEquals(0, container.listBlobsByPrefix("fo").size()); - assertEquals(0, container.listBlobsByPrefix("noSuchFile").size()); + container.delete(OperationPurpose.SNAPSHOT); + assertEquals(0, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, null).size()); + assertEquals(0, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "fo").size()); + assertEquals(0, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "noSuchFile").size()); } public static byte[] readBlobPartially(BlobContainer container, String name, int pos, int length) throws IOException { byte[] data = new byte[length]; - try (InputStream inputStream = container.readBlob(name, pos, length)) { + try (InputStream inputStream = container.readBlob(OperationPurpose.SNAPSHOT, name, pos, length)) { assertThat(Streams.readFully(inputStream, data), CoreMatchers.equalTo(length)); assertThat(inputStream.read(), CoreMatchers.equalTo(-1)); } diff --git 
a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index b3e384906a609..75c3d8d77dd72 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -1869,7 +1869,7 @@ public void testTransportCompressionSetting() throws IOException { assumeTrue("the old transport.compress setting existed before 7.14", getOldClusterVersion().before(Version.V_7_14_0)); assumeTrue( "Early versions of 6.x do not have cluster.remote* prefixed settings", - getOldClusterVersion().onOrAfter(Version.V_7_14_0.minimumCompatibilityVersion()) + getOldClusterVersion().onOrAfter(Version.fromString("6.8.0")) ); if (isRunningAgainstOldCluster()) { final Request putSettingsRequest = new Request("PUT", "/_cluster/settings"); diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java index 7d0095dd81f63..7eb0a38ad8099 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/SystemIndexRestIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.http; import org.elasticsearch.Version; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Request; @@ -32,7 +33,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import 
org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -234,7 +235,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli indexRequest.source(Map.of("some_field", "some_value")); return channel -> client.index( indexRequest, - new RestStatusToXContentListener<>(channel, r -> r.getLocation(indexRequest.routing())) + new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())) ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java index 617b78e4f1bb8..535b9b73c9dc2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.action; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.Requests; import org.elasticsearch.test.ESIntegTestCase; @@ -30,9 +29,9 @@ public void testThreadedListeners() throws Throwable { request.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); } - client.index(request, new ActionListener() { + client.index(request, new ActionListener() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { threadName.set(Thread.currentThread().getName()); latch.countDown(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 86ff08b5de36d..54c10499b0b3a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; @@ -25,7 +26,6 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchTransportService; @@ -454,7 +454,7 @@ public void waitForTaskCompletion(Task task) {} } // Need to run the task in a separate thread because node client's .execute() is blocked by our task listener index = new Thread(() -> { - IndexResponse indexResponse = client().prepareIndex("test").setSource("test", "test").get(); + DocWriteResponse indexResponse = client().prepareIndex("test").setSource("test", "test").get(); assertArrayEquals(ReplicationResponse.NO_FAILURES, indexResponse.getShardInfo().getFailures()); }); index.start(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java index 2dd662f6782fa..127d399eab04a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java @@ -15,7 +15,6 @@ import 
org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteComposableIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; @@ -117,7 +116,7 @@ public void testWriteToAliasPrimaryAutoCreatedFirst() throws Exception { client().execute(AutoCreateAction.INSTANCE, request).get(); } - IndexResponse response = client().prepareIndex(INDEX_NAME).setSource("{\"foo\":\"bar\"}", XContentType.JSON).get(); + DocWriteResponse response = client().prepareIndex(INDEX_NAME).setSource("{\"foo\":\"bar\"}", XContentType.JSON).get(); assertThat(response.getResult(), equalTo(DocWriteResponse.Result.CREATED)); } @@ -136,7 +135,7 @@ public void testWriteToAliasSecondaryAutoCreatedFirst() throws Exception { client().execute(AutoCreateAction.INSTANCE, request).get(); } - IndexResponse response = client().prepareIndex(INDEX_NAME).setSource("{\"foo\":\"bar\"}", XContentType.JSON).get(); + DocWriteResponse response = client().prepareIndex(INDEX_NAME).setSource("{\"foo\":\"bar\"}", XContentType.JSON).get(); assertThat(response.getResult(), equalTo(DocWriteResponse.Result.CREATED)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java index 2a2238b8984fb..96e3939312870 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java @@ -10,10 +10,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionRequestValidationException; 
+import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.replication.ReplicationRequest; @@ -164,7 +164,7 @@ public void testDeleteIndexWhileIndexing() throws Exception { while (stopped.get() == false && docID.get() < 5000) { String id = Integer.toString(docID.incrementAndGet()); try { - IndexResponse response = client().prepareIndex(index) + DocWriteResponse response = client().prepareIndex(index) .setId(id) .setSource(Map.of("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index efa96ba05182d..7365e39049430 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.update.UpdateRequest; @@ -697,7 +696,7 @@ public void testNoopUpdate() { createIndex(indexName, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1).build()); internalCluster().ensureAtLeastNumDataNodes(2); ensureGreen(indexName); - IndexResponse doc = 
index(indexName, "1", Map.of("user", "xyz")); + DocWriteResponse doc = index(indexName, "1", Map.of("user", "xyz")); assertThat(doc.getShardInfo().getSuccessful(), equalTo(2)); final BulkResponse bulkResponse = client().prepareBulk() .add(new UpdateRequest().index(indexName).id("1").detectNoop(true).doc("user", "xyz")) // noop update diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index 655a13c154338..e44a5a6a48181 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -13,10 +13,10 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; @@ -125,7 +125,7 @@ public void testLocalClusterAlias() { indexRequest.id("1"); indexRequest.source("field", "value"); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertEquals(RestStatus.CREATED, indexResponse.status()); TaskId parentTaskId = new TaskId("node", randomNonNegativeLong()); @@ -174,7 +174,7 @@ public void testAbsoluteStartMillis() { indexRequest.id("1"); indexRequest.source("date", "1970-01-01"); 
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertEquals(RestStatus.CREATED, indexResponse.status()); } { @@ -182,7 +182,7 @@ public void testAbsoluteStartMillis() { indexRequest.id("1"); indexRequest.source("date", "1982-01-01"); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertEquals(RestStatus.CREATED, indexResponse.status()); } { @@ -250,14 +250,14 @@ public void testFinalReduce() { IndexRequest indexRequest = new IndexRequest("test"); indexRequest.id("1"); indexRequest.source("price", 10); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertEquals(RestStatus.CREATED, indexResponse.status()); } { IndexRequest indexRequest = new IndexRequest("test"); indexRequest.id("2"); indexRequest.source("price", 100); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertEquals(RestStatus.CREATED, indexResponse.status()); } indicesAdmin().prepareRefresh("test").get(); @@ -552,7 +552,7 @@ private void indexSomeDocs(String indexName, int numberOfShards, int numberOfDoc createIndex(indexName, Settings.builder().put("index.number_of_shards", numberOfShards).build()); for (int i = 0; i < numberOfDocs; i++) { - IndexResponse indexResponse = client().prepareIndex(indexName).setSource("number", randomInt()).get(); + DocWriteResponse indexResponse = client().prepareIndex(indexName).setSource("number", randomInt()).get(); assertEquals(RestStatus.CREATED, indexResponse.status()); } indicesAdmin().prepareRefresh(indexName).get(); 
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/AutoCreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/AutoCreateIndexIT.java index 6ed680ecc7034..e4ef0fa7f2d4f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/support/AutoCreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/AutoCreateIndexIT.java @@ -9,7 +9,7 @@ package org.elasticsearch.action.support; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.test.ESIntegTestCase; @@ -41,7 +41,7 @@ public void testBatchingWithDeprecationWarnings() throws Exception { final var client = client(); client.prepareIndex("no-dot").setSource("{}", XContentType.JSON).execute(new ActionListener<>() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { try { final var warningHeaders = client.threadPool().getThreadContext().getResponseHeaders().get("Warning"); if (warningHeaders != null) { @@ -68,7 +68,7 @@ public void onFailure(Exception e) { client.prepareIndex(".has-dot").setSource("{}", XContentType.JSON).execute(new ActionListener<>() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { try { final var warningHeaders = client.threadPool().getThreadContext().getResponseHeaders().get("Warning"); assertNotNull(warningHeaders); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index c6313a0a9b791..9e50d57f5eb99 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.support.master; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -70,7 +69,7 @@ public void run() { } for (int i = 0; i < 10; i++) { // index data with mapping changes - IndexResponse response = client(dataNode).prepareIndex("myindex").setSource("field_" + i, "val").get(); + DocWriteResponse response = client(dataNode).prepareIndex("myindex").setSource("field_" + i, "val").get(); assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java index 13eed96075a19..53d28d98f9695 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.aliases; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; @@ -16,7 +17,6 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import 
org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -107,7 +107,7 @@ public void testAliases() throws Exception { }); logger.info("--> indexing against [alias1], should work now"); - IndexResponse indexResponse = client().index(new IndexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON)) + DocWriteResponse indexResponse = client().index(new IndexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON)) .actionGet(); assertThat(indexResponse.getIndex(), equalTo("test")); @@ -1322,7 +1322,7 @@ public void testIndexingAndQueryingHiddenAliases() throws Exception { ensureGreen(); // Put a couple docs in each index directly - IndexResponse res = client().index(new IndexRequest(nonWriteIndex).id("1").source(source("1", "nonwrite"), XContentType.JSON)) + DocWriteResponse res = client().index(new IndexRequest(nonWriteIndex).id("1").source(source("1", "nonwrite"), XContentType.JSON)) .get(); assertThat(res.status().getStatus(), equalTo(201)); res = client().index(new IndexRequest(writeIndex).id("2").source(source("2", "writeindex"), XContentType.JSON)).get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/aliases/NetNewSystemIndexAliasIT.java b/server/src/internalClusterTest/java/org/elasticsearch/aliases/NetNewSystemIndexAliasIT.java index 105e9f5ec91f0..2e2340294d058 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/aliases/NetNewSystemIndexAliasIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/aliases/NetNewSystemIndexAliasIT.java @@ -9,10 +9,10 @@ package org.elasticsearch.aliases; import org.elasticsearch.Version; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import 
org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -43,7 +43,7 @@ public void testGetAliasWithNetNewSystemIndices() throws Exception { { final IndexRequest request = new IndexRequest(SYSTEM_INDEX_NAME); request.source("some_field", "some_value"); - IndexResponse resp = client().index(request).get(); + DocWriteResponse resp = client().index(request).get(); assertThat(resp.status().getStatus(), is(201)); } ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java index 8628f2542390c..992dfaace6284 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java @@ -10,11 +10,11 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequestBuilder; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterState; @@ -127,7 +127,7 @@ private void canIndexDocument(String index) { try { IndexRequestBuilder builder = client().prepareIndex(index); builder.setSource("foo", "bar"); - IndexResponse r = builder.execute().actionGet(); + DocWriteResponse r = builder.execute().actionGet(); assertThat(r, notNullValue()); } catch (ClusterBlockException e) { fail(); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleDataNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleDataNodesIT.java index 9687fcd4a52e7..8618104fadc26 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleDataNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleDataNodesIT.java @@ -8,11 +8,11 @@ package org.elasticsearch.cluster; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -77,7 +77,7 @@ public void testIndexingBeforeAndAfterDataNodesStart() { equalTo(false) ); - IndexResponse indexResponse = client().index(new IndexRequest("test").id("1").source(SOURCE, XContentType.JSON)).actionGet(); + DocWriteResponse indexResponse = client().index(new IndexRequest("test").id("1").source(SOURCE, XContentType.JSON)).actionGet(); assertThat(indexResponse.getId(), equalTo("1")); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java index f569727729841..8458d83e4e9be 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import 
org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -283,7 +284,7 @@ public void testDelayedMappingPropagationOnPrimary() throws Exception { // this request does not change the cluster state, because mapping is already created, // we don't await and cancel committed publication - ActionFuture docIndexResponse = client().prepareIndex("index").setId("1").setSource("field", 42).execute(); + ActionFuture docIndexResponse = client().prepareIndex("index").setId("1").setSource("field", 42).execute(); // Wait a bit to make sure that the reason why we did not get a response // is that cluster state processing is blocked and not just that it takes @@ -372,7 +373,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { assertEquals(minVersion, maxVersion); }); - final ActionFuture docIndexResponse = client().prepareIndex("index").setId("1").setSource("field", 42).execute(); + final ActionFuture docIndexResponse = client().prepareIndex("index").setId("1").setSource("field", 42).execute(); assertBusy(() -> assertTrue(client().prepareGet("index", "1").get().isExists())); @@ -381,7 +382,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { // if the dynamic mapping update is not applied on the replica yet. 
// this request does not change the cluster state, because the mapping is dynamic, // we need to await and cancel committed publication - ActionFuture dynamicMappingsFut = executeAndCancelCommittedPublication( + ActionFuture dynamicMappingsFut = executeAndCancelCommittedPublication( client().prepareIndex("index").setId("2").setSource("field2", 42) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index 345dd07932901..8c63c5341bbc5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; @@ -520,14 +519,14 @@ public void testPrimaryReplicaResyncFailed() throws Exception { logger.info("--> Indexing with gap in seqno to ensure that some operations will be replayed in resync"); long numDocs = scaledRandomIntBetween(5, 50); for (int i = 0; i < numDocs; i++) { - IndexResponse indexResult = indexDoc("test", Long.toString(i)); + DocWriteResponse indexResult = indexDoc("test", Long.toString(i)); assertThat(indexResult.getShardInfo().getSuccessful(), equalTo(numberOfReplicas + 1)); } final IndexShard oldPrimaryShard = internalCluster().getInstance(IndicesService.class, oldPrimary).getShardOrNull(shardId); EngineTestCase.generateNewSeqNo(IndexShardTestCase.getEngine(oldPrimaryShard)); // Make gap in seqno. 
long moreDocs = scaledRandomIntBetween(1, 10); for (int i = 0; i < moreDocs; i++) { - IndexResponse indexResult = indexDoc("test", Long.toString(numDocs + i)); + DocWriteResponse indexResult = indexDoc("test", Long.toString(numDocs + i)); assertThat(indexResult.getShardInfo().getSuccessful(), equalTo(numberOfReplicas + 1)); } final Set replicasSide1 = Sets.newHashSet(randomSubsetOf(between(1, numberOfReplicas - 1), replicaNodes)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java index ec5fa1647782f..ce9ec8b5fc75c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java @@ -11,10 +11,10 @@ import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -170,7 +170,7 @@ public void testAckedIndexing() throws Exception { indexRequestBuilder.setCreate(true); } - IndexResponse response = indexRequestBuilder.get(timeout); + DocWriteResponse response = indexRequestBuilder.get(timeout); assertThat(response.getResult(), is(oneOf(CREATED, UPDATED))); ackedDocs.put(id, node); logger.trace("[{}] indexed id [{}] through node [{}], response [{}]", name, id, node, response); @@ -293,7 +293,7 @@ public void testRejoinDocumentExistsInAllShardCopies() throws Exception { 
ensureStableCluster(2, notIsolatedNode); assertFalse(client(notIsolatedNode).admin().cluster().prepareHealth("test").setWaitForYellowStatus().get().isTimedOut()); - IndexResponse indexResponse = internalCluster().client(notIsolatedNode).prepareIndex("test").setSource("field", "value").get(); + DocWriteResponse indexResponse = internalCluster().client(notIsolatedNode).prepareIndex("test").setSource("field", "value").get(); assertThat(indexResponse.getVersion(), equalTo(1L)); logger.info("Verifying if document exists via node[{}]", notIsolatedNode); @@ -483,7 +483,7 @@ public void testRestartNodeWhileIndexing() throws Exception { while (stopped.get() == false && docID.get() < 5000) { String id = Integer.toString(docID.incrementAndGet()); try { - IndexResponse response = client().prepareIndex(index) + DocWriteResponse response = client().prepareIndex(index) .setId(id) .setSource(Map.of("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java index edb7c96d2b6eb..828ae839cdd7d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.document; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; @@ -19,7 +20,6 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import 
org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -56,7 +56,7 @@ public void testIndexActions() throws Exception { logger.info("Running Cluster Health"); ensureGreen(); logger.info("Indexing [type1/1]"); - IndexResponse indexResponse = client().prepareIndex() + DocWriteResponse indexResponse = client().prepareIndex() .setIndex("test") .setId("1") .setSource(source("1", "test")) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/ShardInfoIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/ShardInfoIT.java index a99d97dfe18ff..75b818d082dff 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/document/ShardInfoIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/ShardInfoIT.java @@ -8,13 +8,13 @@ package org.elasticsearch.document; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.ClusterState; @@ -33,7 +33,7 @@ public class ShardInfoIT extends ESIntegTestCase { public void testIndexAndDelete() throws Exception { prepareIndex(1); - IndexResponse indexResponse = client().prepareIndex("idx").setSource("{}", XContentType.JSON).get(); + DocWriteResponse indexResponse = client().prepareIndex("idx").setSource("{}", XContentType.JSON).get(); assertShardInfo(indexResponse); DeleteResponse 
deleteResponse = client().prepareDelete("idx", indexResponse.getId()).get(); assertShardInfo(deleteResponse); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java index 91f17e90be24a..129b83f664927 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetRequestBuilder; import org.elasticsearch.action.get.MultiGetResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -203,7 +202,10 @@ public void testGetWithAliasPointingToMultipleIndices() { } else { indicesAdmin().prepareCreate("index3").addAlias(new Alias("alias1").indexRouting("1").writeIndex(true)).get(); } - IndexResponse indexResponse = client().prepareIndex("index1").setId("id").setSource(Collections.singletonMap("foo", "bar")).get(); + DocWriteResponse indexResponse = client().prepareIndex("index1") + .setId("id") + .setSource(Collections.singletonMap("foo", "bar")) + .get(); assertThat(indexResponse.status().getStatus(), equalTo(RestStatus.CREATED.getStatus())); IllegalArgumentException exception = expectThrows( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index 165242128ca8a..ffa6799601b14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -8,10 +8,10 @@ package org.elasticsearch.index; +import 
org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; @@ -133,7 +133,7 @@ public void testFinalPipelineOfOldDestinationIsNotInvoked() { {"processors": [{"final": {"exists":"no_such_field"}}]}"""); clusterAdmin().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); - IndexResponse indexResponse = client().prepareIndex("index") + DocWriteResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Map.of("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -159,7 +159,7 @@ public void testFinalPipelineOfNewDestinationIsInvoked() { {"processors": [{"final": {}}]}"""); clusterAdmin().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); - IndexResponse indexResponse = client().prepareIndex("index") + DocWriteResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Map.of("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -185,7 +185,7 @@ public void testDefaultPipelineOfNewDestinationIsNotInvoked() { {"processors": [{"final": {}}]}"""); clusterAdmin().putPipeline(new PutPipelineRequest("target_default_pipeline", targetPipeline, XContentType.JSON)).actionGet(); - IndexResponse indexResponse = client().prepareIndex("index") + DocWriteResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Map.of("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -211,7 +211,7 @@ public void testDefaultPipelineOfRerouteDestinationIsInvoked() { {"processors": [{"final": 
{}}]}"""); clusterAdmin().putPipeline(new PutPipelineRequest("target_default_pipeline", targetPipeline, XContentType.JSON)).actionGet(); - IndexResponse indexResponse = client().prepareIndex("index") + DocWriteResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Map.of("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -276,7 +276,7 @@ public void testRequestPipelineAndFinalPipeline() { index.setSource(Map.of("field", "value")); index.setPipeline("request_pipeline"); index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - final IndexResponse response = index.get(); + final DocWriteResponse response = index.get(); assertThat(response.status(), equalTo(RestStatus.CREATED)); final GetRequestBuilder get = client().prepareGet("index", "1"); final GetResponse getResponse = get.get(); @@ -303,7 +303,7 @@ public void testDefaultAndFinalPipeline() { final IndexRequestBuilder index = client().prepareIndex("index").setId("1"); index.setSource(Map.of("field", "value")); index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - final IndexResponse response = index.get(); + final DocWriteResponse response = index.get(); assertThat(response.status(), equalTo(RestStatus.CREATED)); final GetRequestBuilder get = client().prepareGet("index", "1"); final GetResponse getResponse = get.get(); @@ -350,7 +350,7 @@ public void testDefaultAndFinalPipelineFromTemplates() { final IndexRequestBuilder index = client().prepareIndex("index").setId("1"); index.setSource(Map.of("field", "value")); index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - final IndexResponse response = index.get(); + final DocWriteResponse response = index.get(); assertThat(response.status(), equalTo(RestStatus.CREATED)); final GetRequestBuilder get = client().prepareGet("index", "1"); final GetResponse getResponse = get.get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java index 6ff7470252122..206aa57bc84b3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java @@ -8,13 +8,13 @@ package org.elasticsearch.index; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.TransportShardBulkAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.UUIDs; @@ -369,7 +369,7 @@ public void testWritesWillSucceedIfBelowThreshold() throws Exception { // The write limits is set to 1MB. We will send up to 800KB to stay below that threshold. 
int thresholdToStopSending = 800 * 1024; - ArrayList> responses = new ArrayList<>(); + ArrayList> responses = new ArrayList<>(); long totalRequestSize = 0; while (totalRequestSize < thresholdToStopSending) { IndexRequest request = new IndexRequest(INDEX_NAME).id(UUIDs.base64UUID()) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/WaitUntilRefreshIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/WaitUntilRefreshIT.java index 4c7b5ee3e775e..302d6ce74d65f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/WaitUntilRefreshIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/WaitUntilRefreshIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Requests; @@ -57,7 +56,7 @@ public void createTestIndex() { } public void testIndex() { - IndexResponse index = client().prepareIndex("test") + DocWriteResponse index = client().prepareIndex("test") .setId("1") .setSource("foo", "bar") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) @@ -144,7 +143,7 @@ public void testBulk() { */ public void testNoRefreshInterval() throws InterruptedException, ExecutionException { updateIndexSettings(Settings.builder().put("index.refresh_interval", -1), "test"); - ActionFuture index = client().prepareIndex("test") + ActionFuture index = client().prepareIndex("test") .setId("1") .setSource("foo", "bar") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java index f754d291c8010..56e1598bd7a15 
100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.index.engine; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; @@ -177,7 +177,7 @@ static IndexingResult indexDocs(int numRequests, int numThreads) throws Exceptio phaser.arriveAndAwaitAdvance(); while (completedRequests.incrementAndGet() <= numRequests) { try { - final IndexResponse resp = client().prepareIndex("test").setSource("{}", XContentType.JSON).get(); + final DocWriteResponse resp = client().prepareIndex("test").setSource("{}", XContentType.JSON).get(); numSuccess.incrementAndGet(); assertThat(resp.status(), equalTo(RestStatus.CREATED)); } catch (IllegalArgumentException e) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java index 86b1cdf315f55..ce23c44cb96cc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.ClusterState; @@ -601,7 +600,7 @@ public void testSubobjectsFalseAtRoot() throws Exception { IndexRequest request = new 
IndexRequest("test").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .source("host.name", "localhost", "host.id", 111, "time", 100, "time.max", 1000); - IndexResponse indexResponse = client().index(request).actionGet(); + DocWriteResponse indexResponse = client().index(request).actionGet(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertBusy(() -> { @@ -646,7 +645,7 @@ public void testSubobjectsFalse() throws Exception { "foo.metrics.time.max", 1000 ); - IndexResponse indexResponse = client().index(request).actionGet(); + DocWriteResponse indexResponse = client().index(request).actionGet(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertBusy(() -> { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java index ac25674456fde..38a878fee34c9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java @@ -9,12 +9,12 @@ package org.elasticsearch.index.shard; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -125,21 +125,18 @@ private void runTestAutomaticRefresh(final IntToLongFunction count) throws Inter started.await(); assertThat(count.applyAsLong(totalNumDocs.get()), equalTo(1L)); for (int i = 1; i 
< numDocs; i++) { - client().prepareIndex("test") - .setId("" + i) - .setSource("{\"foo\" : \"bar\"}", XContentType.JSON) - .execute(new ActionListener() { - @Override - public void onResponse(IndexResponse indexResponse) { - indexingDone.countDown(); - } - - @Override - public void onFailure(Exception e) { - indexingDone.countDown(); - throw new AssertionError(e); - } - }); + client().prepareIndex("test").setId("" + i).setSource("{\"foo\" : \"bar\"}", XContentType.JSON).execute(new ActionListener<>() { + @Override + public void onResponse(DocWriteResponse indexResponse) { + indexingDone.countDown(); + } + + @Override + public void onFailure(Exception e) { + indexingDone.countDown(); + throw new AssertionError(e); + } + }); } indexingDone.await(); t.join(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java index eef2c21f432db..6c1b9a56f04cf 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java @@ -103,7 +103,7 @@ public void testCreatedFlag() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); + DocWriteResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").execute().actionGet(); @@ -120,7 +120,7 @@ public void testCreatedFlagWithFlush() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); + DocWriteResponse indexResponse = 
client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); client().prepareDelete("test", "1").execute().actionGet(); @@ -148,7 +148,7 @@ public void testCreatedFlagParallelExecution() throws Exception { @Override public Void call() throws Exception { int docId = random.nextInt(docCount); - IndexResponse indexResponse = indexDoc("test", Integer.toString(docId), "field1", "value"); + DocWriteResponse indexResponse = indexDoc("test", Integer.toString(docId), "field1", "value"); if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) { createdCounts.incrementAndGet(docId); } @@ -169,7 +169,7 @@ public void testCreatedFlagWithExternalVersioning() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test") + DocWriteResponse indexResponse = client().prepareIndex("test") .setId("1") .setSource("field1", "value1_1") .setVersion(123) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java index 8649946308e86..64cd59fef36f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java @@ -9,7 +9,7 @@ package org.elasticsearch.indices.mapping; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.core.Strings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; @@ -61,9 +61,9 @@ public void testConcurrentDynamicMapping() throws Exception { client().prepareIndex("test") .setId(Integer.toString(currentID++)) 
.setSource(source) - .execute(new ActionListener() { + .execute(new ActionListener() { @Override - public void onResponse(IndexResponse response) { + public void onResponse(DocWriteResponse response) { latch.countDown(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java index b41b4727dc12b..08800587cab3c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; @@ -41,7 +40,7 @@ public void testPrimaryRelocationWhileIndexing() throws Exception { @Override public void run() { while (finished.get() == false && numAutoGenDocs.get() < 10_000) { - IndexResponse indexResponse = client().prepareIndex("test").setId("id").setSource("field", "value").get(); + DocWriteResponse indexResponse = client().prepareIndex("test").setId("id").setSource("field", "value").get(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); DeleteResponse deleteResponse = client().prepareDelete("test", "id").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 
8ba0f1364bac8..fa07e8ffd9207 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -25,6 +25,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; @@ -35,7 +36,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.PlainActionFuture; @@ -1506,7 +1506,7 @@ public void testPeerRecoveryTrimsLocalTranslog() throws Exception { indexers[i] = new Thread(() -> { while (stopped.get() == false) { try { - IndexResponse response = client().prepareIndex(indexName) + DocWriteResponse response = client().prepareIndex(indexName) .setSource(Map.of("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON) .get(); assertThat(response.getResult(), is(oneOf(CREATED, UPDATED))); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java index 2b628cacac01c..70ef73862016a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java @@ 
-9,11 +9,11 @@ package org.elasticsearch.indices.state; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -363,7 +363,7 @@ public void testTranslogStats() throws Exception { final int nbDocs = randomIntBetween(0, 50); int uncommittedOps = 0; for (long i = 0; i < nbDocs; i++) { - final IndexResponse indexResponse = client().prepareIndex(indexName).setId(Long.toString(i)).setSource("field", i).get(); + final DocWriteResponse indexResponse = client().prepareIndex(indexName).setId(Long.toString(i)).setSource("field", i).get(); assertThat(indexResponse.status(), is(RestStatus.CREATED)); if (rarely()) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java index bd89812c2a185..53df5a1d3c834 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import 
org.elasticsearch.action.support.WriteRequest; @@ -1320,7 +1319,7 @@ public void testConcurrentIndexingAndStatsRequests() throws BrokenBarrierExcepti } while (stop.get() == false) { final String id = Integer.toString(idGenerator.incrementAndGet()); - final IndexResponse response = client().prepareIndex("test").setId(id).setSource("{}", XContentType.JSON).get(); + final DocWriteResponse response = client().prepareIndex("test").setId(id).setSource("{}", XContentType.JSON).get(); assertThat(response.getResult(), equalTo(DocWriteResponse.Result.CREATED)); } }); @@ -1389,7 +1388,7 @@ public void testWriteLoadIsCaptured() throws Exception { final AtomicInteger idGenerator = new AtomicInteger(); assertBusy(() -> { final int numDocs = randomIntBetween(15, 25); - final List> indexRequestFutures = new ArrayList<>(numDocs); + final List> indexRequestFutures = new ArrayList<>(numDocs); for (int i = 0; i < numDocs; i++) { indexRequestFutures.add( client().prepareIndex(indexName) @@ -1399,7 +1398,7 @@ public void testWriteLoadIsCaptured() throws Exception { ); } - for (ActionFuture indexRequestFuture : indexRequestFutures) { + for (ActionFuture indexRequestFuture : indexRequestFutures) { assertThat(indexRequestFuture.get().getResult(), equalTo(DocWriteResponse.Result.CREATED)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index ab852cf341514..936e9bf87c788 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -11,11 +11,11 @@ import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.tests.util.English; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import 
org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; @@ -603,7 +603,7 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E logger.info("--> flush so we have an actual index"); indicesAdmin().prepareFlush().execute().actionGet(); logger.info("--> index more docs so we have something in the translog"); - final List> pendingIndexResponses = new ArrayList<>(); + final List> pendingIndexResponses = new ArrayList<>(); for (int i = 10; i < 20; i++) { pendingIndexResponses.add( client().prepareIndex("test") diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java index a1cce85cb0af2..7d444eef787c0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.RepositoryCleanupInProgress; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; @@ -97,7 +98,7 @@ private ActionFuture startBlockedCleanup(String repoN garbageFuture, () -> repository.blobStore() 
.blobContainer(repository.basePath()) - .writeBlob("snap-foo.dat", new BytesArray(new byte[1]), true) + .writeBlob(OperationPurpose.SNAPSHOT, "snap-foo.dat", new BytesArray(new byte[1]), true) ) ); garbageFuture.get(); @@ -145,7 +146,12 @@ public void testCleanupOldIndexN() throws ExecutionException, InterruptedExcepti createOldIndexNFuture, () -> repository.blobStore() .blobContainer(repository.basePath()) - .writeBlob(BlobStoreRepository.INDEX_FILE_PREFIX + generation, new BytesArray(new byte[1]), true) + .writeBlob( + OperationPurpose.SNAPSHOT, + BlobStoreRepository.INDEX_FILE_PREFIX + generation, + new BytesArray(new byte[1]), + true + ) ) ); createOldIndexNFuture.get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index dbfae5e9eea24..519e839c5d322 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; @@ -100,7 +99,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe boolean[] added = new boolean[numDocs]; for (int i = 0; i < numDocs; i++) { try { - IndexResponse indexResponse = client().prepareIndex("test") + DocWriteResponse indexResponse = client().prepareIndex("test") .setId("" + i) .setTimeout(TimeValue.timeValueSeconds(1)) .setSource("test", English.intToEnglish(i)) diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index e829c6da01b26..dba26d0560a14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; @@ -124,7 +123,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc for (int i = 0; i < numDocs; i++) { added[i] = false; try { - IndexResponse indexResponse = client().prepareIndex("test") + DocWriteResponse indexResponse = client().prepareIndex("test") .setId(Integer.toString(i)) .setTimeout(TimeValue.timeValueSeconds(1)) .setSource("test", English.intToEnglish(i)) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 504f5265b4ba1..598c65b8c999d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; -import 
org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; @@ -1415,7 +1414,7 @@ public void testNestedSortingWithNestedFilterAsFilter() throws Exception { ) ); - IndexResponse indexResponse1 = client().prepareIndex("test") + DocWriteResponse indexResponse1 = client().prepareIndex("test") .setId("1") .setSource( jsonBuilder().startObject() @@ -1469,7 +1468,7 @@ public void testNestedSortingWithNestedFilterAsFilter() throws Exception { .get(); assertTrue(indexResponse1.getShardInfo().getSuccessful() > 0); - IndexResponse indexResponse2 = client().prepareIndex("test") + DocWriteResponse indexResponse2 = client().prepareIndex("test") .setId("2") .setSource( jsonBuilder().startObject() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index 1ce873a133f66..7b2ba7cb46770 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -10,8 +10,8 @@ import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; @@ -168,7 +168,7 @@ public void testContextFilteringWorksWithUTF8Categories() throws Exception { LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); 
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); - IndexResponse indexResponse = client().prepareIndex(INDEX) + DocWriteResponse indexResponse = client().prepareIndex(INDEX) .setId("1") .setSource( jsonBuilder().startObject() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/timeseries/support/TimeSeriesDimensionsLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/timeseries/support/TimeSeriesDimensionsLimitIT.java index 379539c3130c5..b3cb2e5f178ca 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/timeseries/support/TimeSeriesDimensionsLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/timeseries/support/TimeSeriesDimensionsLimitIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.timeseries.support; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; @@ -141,7 +141,7 @@ public void testTotalDimensionFieldsSizeLuceneLimit() throws IOException { for (int i = 0; i < dimensionFieldLimit; i++) { source.put(dimensionFieldNames.get(i), randomAlphaOfLength(1024)); } - final IndexResponse indexResponse = client().prepareIndex("test").setSource(source).get(); + final DocWriteResponse indexResponse = client().prepareIndex("test").setSource(source).get(); assertEquals(RestStatus.CREATED.getStatus(), indexResponse.status().getStatus()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/update/UpdateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/update/UpdateIT.java index 98fe88c3b5327..4e97560284c67 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/update/UpdateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/update/UpdateIT.java @@ -17,7 +17,6 @@ import 
org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; @@ -452,7 +451,7 @@ public void testUpdateWithIfSeqNo() throws Exception { createTestIndex(); ensureGreen(); - IndexResponse result = client().prepareIndex("test").setId("1").setSource("field", 1).get(); + DocWriteResponse result = client().prepareIndex("test").setId("1").setSource("field", 1).get(); expectThrows( VersionConflictEngineException.class, () -> client().prepareUpdate(indexOrAlias(), "1") diff --git a/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java index 9d84a1c4727be..b191eb0cf4fe3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java @@ -9,7 +9,7 @@ package org.elasticsearch.versioning; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; @@ -31,9 +31,9 @@ public void testConcurrentOperationOnSameDoc() throws Exception { final AtomicReference failure = new AtomicReference<>(); final CountDownLatch latch = new CountDownLatch(numberOfUpdates); for (int i = 0; i < numberOfUpdates; i++) { - client().prepareIndex("test").setId("1").setSource("field1", i).execute(new ActionListener() { + client().prepareIndex("test").setId("1").setSource("field1", i).execute(new ActionListener<>() { 
@Override - public void onResponse(IndexResponse response) { + public void onResponse(DocWriteResponse response) { latch.countDown(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentSeqNoVersioningIT.java b/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentSeqNoVersioningIT.java index 08cc5fa784fc1..7d9e1f3955089 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentSeqNoVersioningIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentSeqNoVersioningIT.java @@ -8,8 +8,8 @@ package org.elasticsearch.versioning; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.coordination.LinearizabilityChecker; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; @@ -237,7 +237,7 @@ public void run() { Consumer historyResponse = partition.invoke(version); try { // we should be able to remove timeout or fail hard on timeouts - IndexResponse indexResponse = client().index(indexRequest).actionGet(timeout, TimeUnit.SECONDS); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(timeout, TimeUnit.SECONDS); IndexResponseHistoryOutput historyOutput = new IndexResponseHistoryOutput(indexResponse); historyResponse.accept(historyOutput); // validate version and seqNo strictly increasing for successful CAS to avoid that overhead during @@ -515,7 +515,7 @@ private interface HistoryOutput extends NamedWriteable { private static class IndexResponseHistoryOutput implements HistoryOutput { private final Version outputVersion; - private IndexResponseHistoryOutput(IndexResponse response) { + private IndexResponseHistoryOutput(DocWriteResponse response) { this(new Version(response.getPrimaryTerm(), 
response.getSeqNo())); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java index c4a56770d186e..c6fbdc909e2e6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -65,7 +65,7 @@ public void testExternalVersioningInitialDelete() throws Exception { VersionConflictEngineException.class ); - IndexResponse indexResponse = client().prepareIndex("test") + DocWriteResponse indexResponse = client().prepareIndex("test") .setId("1") .setSource("field1", "value1_1") .setVersion(18) @@ -78,7 +78,7 @@ public void testExternalVersioningInitialDelete() throws Exception { public void testExternalGTE() throws Exception { createIndex("test"); - IndexResponse indexResponse = client().prepareIndex("test") + DocWriteResponse indexResponse = client().prepareIndex("test") .setId("1") .setSource("field1", "value1_1") .setVersion(12) @@ -151,7 +151,7 @@ public void testExternalVersioning() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test") + DocWriteResponse indexResponse = client().prepareIndex("test") .setId("1") .setSource("field1", "value1_1") .setVersion(12) @@ -268,7 +268,7 @@ public void testCompareAndSetInitialDelete() throws Exception { VersionConflictEngineException.class ); - IndexResponse indexResponse = client().prepareIndex("test") + DocWriteResponse indexResponse = client().prepareIndex("test") .setId("1") .setSource("field1", "value1_1") .setCreate(true) @@ -281,7 +281,7 @@ public void testCompareAndSet() { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); + DocWriteResponse indexResponse = 
client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); assertThat(indexResponse.getSeqNo(), equalTo(0L)); assertThat(indexResponse.getPrimaryTerm(), equalTo(1L)); @@ -366,7 +366,7 @@ public void testSimpleVersioningWithFlush() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").get(); + DocWriteResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").get(); assertThat(indexResponse.getSeqNo(), equalTo(0L)); client().admin().indices().prepareFlush().execute().actionGet(); @@ -834,7 +834,7 @@ public void testGCDeletesZero() throws Exception { public void testSpecialVersioning() { internalCluster().ensureAtLeastNumDataNodes(2); createIndex("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build()); - IndexResponse doc1 = client().prepareIndex("test") + DocWriteResponse doc1 = client().prepareIndex("test") .setId("1") .setSource("field", "value1") .setVersion(0) @@ -842,7 +842,7 @@ public void testSpecialVersioning() { .execute() .actionGet(); assertThat(doc1.getVersion(), equalTo(0L)); - IndexResponse doc2 = client().prepareIndex("test") + DocWriteResponse doc2 = client().prepareIndex("test") .setId("1") .setSource("field", "value2") .setVersion(Versions.MATCH_ANY) @@ -851,7 +851,7 @@ public void testSpecialVersioning() { .actionGet(); assertThat(doc2.getVersion(), equalTo(1L)); client().prepareDelete("test", "1").get(); // v2 - IndexResponse doc3 = client().prepareIndex("test") + DocWriteResponse doc3 = client().prepareIndex("test") .setId("1") .setSource("field", "value3") .setVersion(Versions.MATCH_DELETED) @@ -859,7 +859,7 @@ public void testSpecialVersioning() { .execute() .actionGet(); assertThat(doc3.getVersion(), equalTo(3L)); - IndexResponse doc4 = client().prepareIndex("test") + DocWriteResponse doc4 = client().prepareIndex("test") .setId("1") 
.setSource("field", "value4") .setVersion(4L) diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 99ce5910c9775..1a082e7558577 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -385,6 +385,7 @@ org.elasticsearch.serverless.apifiltering; exports org.elasticsearch.telemetry.tracing; exports org.elasticsearch.telemetry; + exports org.elasticsearch.telemetry.metric; provides java.util.spi.CalendarDataProvider with org.elasticsearch.common.time.IsoCalendarDataProvider; provides org.elasticsearch.xcontent.ErrorOnUnknown with org.elasticsearch.common.xcontent.SuggestingErrorOnUnknown; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5f120134acb04..62f39260aeec4 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -145,8 +145,12 @@ static TransportVersion def(int id) { public static final TransportVersion WAIT_FOR_CLUSTER_STATE_IN_RECOVERY_ADDED = def(8_502_00_0); public static final TransportVersion RECOVERY_COMMIT_TOO_NEW_EXCEPTION_ADDED = def(8_503_00_0); public static final TransportVersion NODE_INFO_COMPONENT_VERSIONS_ADDED = def(8_504_00_0); - public static final TransportVersion COMPACT_FIELD_CAPS_ADDED = def(8_505_00_0); + public static final TransportVersion DATA_STREAM_RESPONSE_INDEX_PROPERTIES = def(8_506_00_0); + public static final TransportVersion ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED = def(8_507_00_0); + public static final TransportVersion LONG_COUNT_IN_HISTOGRAM_ADDED = def(8_508_00_0); + public static final TransportVersion INFERENCE_MODEL_SECRETS_ADDED = def(8_509_00_0); + public static final TransportVersion NODE_INFO_REQUEST_SIMPLIFIED = def(8_510_00_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 7395d6003ec44..846a0800cc41f 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -677,7 +677,7 @@ public void reg actions.register(ListTasksAction.INSTANCE, TransportListTasksAction.class); actions.register(GetTaskAction.INSTANCE, TransportGetTaskAction.class); actions.register(CancelTasksAction.INSTANCE, TransportCancelTasksAction.class); - actions.register(GetHealthAction.INSTANCE, GetHealthAction.TransportAction.class); + actions.register(GetHealthAction.INSTANCE, GetHealthAction.LocalAction.class); actions.register(PrevalidateNodeRemovalAction.INSTANCE, TransportPrevalidateNodeRemovalAction.class); actions.register(HealthApiStatsAction.INSTANCE, HealthApiStatsTransportAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 9ea073a69d4d4..b6e5a51c117b7 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.Index; @@ -25,6 +24,7 @@ import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; import 
org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -41,7 +41,7 @@ /** * A base class for the response of a write operation that involves a single doc */ -public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, StatusToXContentObject { +public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, ToXContentObject { private static final String _SHARDS = "_shards"; private static final String _INDEX = "_index"; @@ -216,7 +216,6 @@ public void setForcedRefresh(boolean forcedRefresh) { } /** returns the rest status for this response (based on {@link ShardInfo#status()} */ - @Override public RestStatus status() { return getShardInfo().status(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java index abe51b35f89e2..19e84e7443eed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java @@ -18,12 +18,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -37,7 +37,7 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static 
org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class ClusterHealthResponse extends ActionResponse implements StatusToXContentObject { +public class ClusterHealthResponse extends ActionResponse implements ToXContentObject { private static final String CLUSTER_NAME = "cluster_name"; private static final String STATUS = "status"; private static final String TIMED_OUT = "timed_out"; @@ -333,7 +333,6 @@ public String toString() { return Strings.toString(this); } - @Override public RestStatus status() { return isTimedOut() ? RestStatus.REQUEST_TIMEOUT : RestStatus.OK; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java index 04b2a7d980678..d7734f7a0eea3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java @@ -85,6 +85,13 @@ public NodesInfoRequest addMetric(String metric) { */ public NodesInfoRequest addMetrics(String... metrics) { SortedSet metricsSet = new TreeSet<>(Set.of(metrics)); + return addMetrics(metricsSet); + } + + /** + * Add multiple metrics + */ + public NodesInfoRequest addMetrics(Set metricsSet) { if (NodesInfoMetrics.Metric.allMetrics().containsAll(metricsSet) == false) { metricsSet.removeAll(NodesInfoMetrics.Metric.allMetrics()); String plural = metricsSet.size() == 1 ? 
"" : "s"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index 14dffce86daa5..c263bea92ffa3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -26,6 +26,8 @@ import java.util.List; import java.util.Set; +import static org.elasticsearch.TransportVersions.NODE_INFO_REQUEST_SIMPLIFIED; + public class TransportNodesInfoAction extends TransportNodesAction< NodesInfoRequest, NodesInfoResponse, @@ -76,8 +78,7 @@ protected NodeInfo newNodeResponse(StreamInput in, DiscoveryNode node) throws IO @Override protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest, Task task) { - NodesInfoRequest request = nodeRequest.request; - Set metrics = request.requestedMetrics(); + Set metrics = nodeRequest.requestedMetrics(); return nodeService.info( metrics.contains(NodesInfoMetrics.Metric.SETTINGS.metricName()), metrics.contains(NodesInfoMetrics.Metric.OS.metricName()), @@ -96,21 +97,33 @@ protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest, Task task) { public static class NodeInfoRequest extends TransportRequest { - NodesInfoRequest request; + private NodesInfoMetrics nodesInfoMetrics; public NodeInfoRequest(StreamInput in) throws IOException { super(in); - request = new NodesInfoRequest(in); + if (in.getTransportVersion().onOrAfter(NODE_INFO_REQUEST_SIMPLIFIED)) { + this.nodesInfoMetrics = new NodesInfoMetrics(in); + } else { + this.nodesInfoMetrics = new NodesInfoRequest(in).getNodesInfoMetrics(); + } } public NodeInfoRequest(NodesInfoRequest request) { - this.request = request; + this.nodesInfoMetrics = request.getNodesInfoMetrics(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - 
request.writeTo(out); + if (out.getTransportVersion().onOrAfter(NODE_INFO_REQUEST_SIMPLIFIED)) { + this.nodesInfoMetrics.writeTo(out); + } else { + new NodesInfoRequest().clear().addMetrics(nodesInfoMetrics.requestedMetrics()).writeTo(out); + } + } + + public Set requestedMetrics() { + return nodesInfoMetrics.requestedMetrics(); } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java index 781947b9db814..e7568a0c66a37 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java @@ -12,11 +12,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptContextInfo; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -30,7 +29,7 @@ import java.util.function.Function; import java.util.stream.Collectors; -public class GetScriptContextResponse extends ActionResponse implements StatusToXContentObject { +public class GetScriptContextResponse extends ActionResponse implements ToXContentObject { private static final ParseField CONTEXTS = new ParseField("contexts"); final Map contexts; @@ -87,11 +86,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - @Override - public RestStatus status() { - return RestStatus.OK; - } - @Override public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject().startArray(CONTEXTS.getPreferredName()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponse.java index 4bd2b63c435b1..36fe688b396da 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponse.java @@ -12,16 +12,15 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptLanguagesInfo; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; -public class GetScriptLanguageResponse extends ActionResponse implements StatusToXContentObject, Writeable { +public class GetScriptLanguageResponse extends ActionResponse implements ToXContentObject, Writeable { public final ScriptLanguagesInfo info; GetScriptLanguageResponse(ScriptLanguagesInfo info) { @@ -38,11 +37,6 @@ public void writeTo(StreamOutput out) throws IOException { info.writeTo(out); } - @Override - public RestStatus status() { - return RestStatus.OK; - } - public static GetScriptLanguageResponse fromXContent(XContentParser parser) throws IOException { return new GetScriptLanguageResponse(ScriptLanguagesInfo.fromXContent(parser)); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index 9331ded7a2de3..0202a0355abb6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -11,12 +11,12 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.StoredScriptSource; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -26,7 +26,7 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class GetStoredScriptResponse extends ActionResponse implements StatusToXContentObject { +public class GetStoredScriptResponse extends ActionResponse implements ToXContentObject { public static final ParseField _ID_PARSE_FIELD = new ParseField("_id"); public static final ParseField FOUND_PARSE_FIELD = new ParseField("found"); @@ -84,7 +84,6 @@ public StoredScriptSource getSource() { return source; } - @Override public RestStatus status() { return source != null ? 
RestStatus.OK : RestStatus.NOT_FOUND; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java index b61b6318a20b6..dadd493c780eb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/ListDanglingIndicesResponse.java @@ -14,8 +14,8 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -34,7 +34,7 @@ * information for each dangling index is presented under the "dangling_indices" key. If any nodes * in the cluster failed to answer, the details are presented under the "_nodes.failures" key. */ -public class ListDanglingIndicesResponse extends BaseNodesResponse implements StatusToXContentObject { +public class ListDanglingIndicesResponse extends BaseNodesResponse implements ToXContentObject { public ListDanglingIndicesResponse(StreamInput in) throws IOException { super(in); @@ -48,7 +48,6 @@ public ListDanglingIndicesResponse( super(clusterName, nodes, failures); } - @Override public RestStatus status() { return this.hasFailures() ? 
RestStatus.INTERNAL_SERVER_ERROR : RestStatus.OK; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java index e0b15558b860d..76259d899c90a 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; @@ -30,6 +29,7 @@ import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -44,14 +44,13 @@ * Represents a single item response for an action executed as part of the bulk API. Holds the index/type/id * of the relevant action, and if it has failed or not (with the failure message in case it failed). */ -public class BulkItemResponse implements Writeable, StatusToXContentObject { +public class BulkItemResponse implements Writeable, ToXContentObject { private static final String _INDEX = "_index"; private static final String _ID = "_id"; private static final String STATUS = "status"; private static final String ERROR = "error"; - @Override public RestStatus status() { return failure == null ? 
response.status() : failure.getStatus(); } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index aa69ede54dea1..9c1fb63a6b8d0 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -32,8 +33,11 @@ import java.time.Instant; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; +import static org.elasticsearch.TransportVersions.DATA_STREAM_RESPONSE_INDEX_PROPERTIES; + public class GetDataStreamAction extends ActionType { public static final GetDataStreamAction INSTANCE = new GetDataStreamAction(); @@ -142,12 +146,28 @@ public Request includeDefaults(boolean includeDefaults) { } public static class Response extends ActionResponse implements ToXContentObject { + + public enum ManagedBy { + ILM("Index Lifecycle Management"), + LIFECYCLE("Data stream lifecycle"), + UNMANAGED("Unmanaged"); + + public final String displayValue; + + ManagedBy(String displayValue) { + this.displayValue = displayValue; + } + } + public static final ParseField DATA_STREAMS_FIELD = new ParseField("data_streams"); public static class DataStreamInfo implements SimpleDiffable, ToXContentObject { public static final ParseField STATUS_FIELD = new ParseField("status"); public static final ParseField INDEX_TEMPLATE_FIELD = new ParseField("template"); + public static final ParseField PREFER_ILM = new ParseField("prefer_ilm"); + public static final ParseField 
MANAGED_BY = new ParseField("managed_by"); + public static final ParseField NEXT_GENERATION_INDEX_MANAGED_BY = new ParseField("next_generation_managed_by"); public static final ParseField ILM_POLICY_FIELD = new ParseField("ilm_policy"); public static final ParseField LIFECYCLE_FIELD = new ParseField("lifecycle"); public static final ParseField HIDDEN_FIELD = new ParseField("hidden"); @@ -167,28 +187,39 @@ public static class DataStreamInfo implements SimpleDiffable, To private final String ilmPolicyName; @Nullable private final TimeSeries timeSeries; + private final Map indexSettingsValues; + private final boolean templatePreferIlmValue; public DataStreamInfo( DataStream dataStream, ClusterHealthStatus dataStreamStatus, @Nullable String indexTemplate, @Nullable String ilmPolicyName, - @Nullable TimeSeries timeSeries + @Nullable TimeSeries timeSeries, + Map indexSettingsValues, + boolean templatePreferIlmValue ) { this.dataStream = dataStream; this.dataStreamStatus = dataStreamStatus; this.indexTemplate = indexTemplate; this.ilmPolicyName = ilmPolicyName; this.timeSeries = timeSeries; + this.indexSettingsValues = indexSettingsValues; + this.templatePreferIlmValue = templatePreferIlmValue; } + @SuppressWarnings("unchecked") DataStreamInfo(StreamInput in) throws IOException { this( new DataStream(in), ClusterHealthStatus.readFrom(in), in.readOptionalString(), in.readOptionalString(), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0) ? in.readOptionalWriteable(TimeSeries::new) : null + in.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0) ? in.readOptionalWriteable(TimeSeries::new) : null, + in.getTransportVersion().onOrAfter(DATA_STREAM_RESPONSE_INDEX_PROPERTIES) + ? in.readMap(Index::new, IndexProperties::new) + : Map.of(), + in.getTransportVersion().onOrAfter(DATA_STREAM_RESPONSE_INDEX_PROPERTIES) ? 
in.readBoolean() : true ); } @@ -215,6 +246,14 @@ public TimeSeries getTimeSeries() { return timeSeries; } + public Map getIndexSettingsValues() { + return indexSettingsValues; + } + + public boolean templatePreferIlmValue() { + return templatePreferIlmValue; + } + @Override public void writeTo(StreamOutput out) throws IOException { dataStream.writeTo(out); @@ -224,6 +263,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0)) { out.writeOptionalWriteable(timeSeries); } + if (out.getTransportVersion().onOrAfter(DATA_STREAM_RESPONSE_INDEX_PROPERTIES)) { + out.writeMap(indexSettingsValues); + out.writeBoolean(templatePreferIlmValue); + } } @Override @@ -242,7 +285,27 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla .startObject() .field(DataStream.NAME_FIELD.getPreferredName(), DataStream.TIMESTAMP_FIELD_NAME) .endObject(); - builder.xContentList(DataStream.INDICES_FIELD.getPreferredName(), dataStream.getIndices()); + + builder.field(DataStream.INDICES_FIELD.getPreferredName()); + if (dataStream.getIndices() == null) { + builder.nullValue(); + } else { + builder.startArray(); + for (Index index : dataStream.getIndices()) { + builder.startObject(); + index.toXContentFragment(builder); + IndexProperties indexProperties = indexSettingsValues.get(index); + if (indexProperties != null) { + builder.field(PREFER_ILM.getPreferredName(), indexProperties.preferIlm()); + if (indexProperties.ilmPolicyName() != null) { + builder.field(ILM_POLICY_FIELD.getPreferredName(), indexProperties.ilmPolicyName()); + } + builder.field(MANAGED_BY.getPreferredName(), indexProperties.managedBy.displayValue); + } + builder.endObject(); + } + builder.endArray(); + } builder.field(DataStream.GENERATION_FIELD.getPreferredName(), dataStream.getGeneration()); if (dataStream.getMetadata() != null) { builder.field(DataStream.METADATA_FIELD.getPreferredName(), dataStream.getMetadata()); @@ 
-258,6 +321,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla if (ilmPolicyName != null) { builder.field(ILM_POLICY_FIELD.getPreferredName(), ilmPolicyName); } + builder.field(NEXT_GENERATION_INDEX_MANAGED_BY.getPreferredName(), getNextGenerationManagedBy().displayValue); + builder.field(PREFER_ILM.getPreferredName(), templatePreferIlmValue); builder.field(HIDDEN_FIELD.getPreferredName(), dataStream.isHidden()); builder.field(SYSTEM_FIELD.getPreferredName(), dataStream.isSystem()); builder.field(ALLOW_CUSTOM_ROUTING.getPreferredName(), dataStream.isAllowCustomRouting()); @@ -280,21 +345,55 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla return builder; } + /** + * Computes and returns which system will manage the next generation for this data stream. + */ + public ManagedBy getNextGenerationManagedBy() { + // both ILM and DSL are configured so let's check the prefer_ilm setting to see which system takes precedence + if (ilmPolicyName != null && dataStream.getLifecycle() != null && dataStream.getLifecycle().isEnabled()) { + return templatePreferIlmValue ? 
ManagedBy.ILM : ManagedBy.LIFECYCLE; + } + + if (ilmPolicyName != null) { + return ManagedBy.ILM; + } + + if (dataStream.getLifecycle() != null && dataStream.getLifecycle().isEnabled()) { + return ManagedBy.LIFECYCLE; + } + + return ManagedBy.UNMANAGED; + } + @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } DataStreamInfo that = (DataStreamInfo) o; - return dataStream.equals(that.dataStream) + return templatePreferIlmValue == that.templatePreferIlmValue + && Objects.equals(dataStream, that.dataStream) && dataStreamStatus == that.dataStreamStatus && Objects.equals(indexTemplate, that.indexTemplate) && Objects.equals(ilmPolicyName, that.ilmPolicyName) - && Objects.equals(timeSeries, that.timeSeries); + && Objects.equals(timeSeries, that.timeSeries) + && Objects.equals(indexSettingsValues, that.indexSettingsValues); } @Override public int hashCode() { - return Objects.hash(dataStream, dataStreamStatus, indexTemplate, ilmPolicyName, timeSeries); + return Objects.hash( + dataStream, + dataStreamStatus, + indexTemplate, + ilmPolicyName, + timeSeries, + indexSettingsValues, + templatePreferIlmValue + ); } } @@ -326,6 +425,23 @@ public int hashCode() { } } + /** + * Encapsulates the configured properties we want to display for each backing index. + * They'll usually be settings values, but could also be additional properties derived from settings. 
+ */ + public record IndexProperties(boolean preferIlm, @Nullable String ilmPolicyName, ManagedBy managedBy) implements Writeable { + public IndexProperties(StreamInput in) throws IOException { + this(in.readBoolean(), in.readOptionalString(), in.readEnum(ManagedBy.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(preferIlm); + out.writeOptionalString(ilmPolicyName); + out.writeEnum(managedBy); + } + } + private final List dataStreams; @Nullable private final RolloverConfiguration rolloverConfiguration; diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java index 51e7509863796..58c50df47c3ce 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java @@ -13,13 +13,13 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -33,7 +33,7 @@ /** * Response containing the score explanation. 
*/ -public class ExplainResponse extends ActionResponse implements StatusToXContentObject { +public class ExplainResponse extends ActionResponse implements ToXContentObject { private static final ParseField _INDEX = new ParseField("_index"); private static final ParseField _ID = new ParseField("_id"); @@ -44,9 +44,9 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO private static final ParseField DETAILS = new ParseField("details"); private static final ParseField GET = new ParseField("get"); - private String index; - private String id; - private boolean exists; + private final String index; + private final String id; + private final boolean exists; private Explanation explanation; private GetResult getResult; @@ -110,7 +110,6 @@ public GetResult getGetResult() { return getResult; } - @Override public RestStatus status() { return exists ? RestStatus.OK : RestStatus.NOT_FOUND; } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java index 7dc73940ce2ff..969d86f5f470c 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java @@ -178,7 +178,7 @@ static Map retrieveFieldCaps( false, false, null, - Collections.emptyMap() + Map.of() ); responseMap.put(parentField, fieldCap); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java index e9e3a05169afc..06ea2dee17481 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java @@ -203,14 +203,6 @@ public Map get() { return responseMap; } - /** - * - 
* Get the field capabilities for the provided {@code field} - */ - public IndexFieldCapabilities getField(String field) { - return responseMap.get(field); - } - TransportVersion getOriginVersion() { return originVersion; } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java index ef609a06cb8be..de2f6965e011d 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java @@ -63,7 +63,7 @@ public static IndexFieldCapabilities readFrom(StreamInput in) throws IOException isAggregatable, isDimension, metricType, - in.readMap(StreamInput::readString) + in.readImmutableMap(StreamInput::readString) ); } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexAction.java b/server/src/main/java/org/elasticsearch/action/index/IndexAction.java index 9ac379934c358..3cb01be92e734 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexAction.java @@ -9,13 +9,17 @@ package org.elasticsearch.action.index; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; -public class IndexAction extends ActionType { +public class IndexAction extends ActionType { public static final IndexAction INSTANCE = new IndexAction(); public static final String NAME = "indices:data/write/index"; private IndexAction() { - super(NAME, IndexResponse::new); + super(NAME, in -> { + assert false : "Might not be an IndexResponse!"; + return new IndexResponse(in); + }); } } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index 3a190c4e314c0..360b470eb1ab4 100644 --- 
a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.index; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.action.support.replication.ReplicationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -23,7 +24,7 @@ /** * An index document action request builder. */ -public class IndexRequestBuilder extends ReplicationRequestBuilder +public class IndexRequestBuilder extends ReplicationRequestBuilder implements WriteRequestBuilder { @@ -84,7 +85,7 @@ public IndexRequestBuilder setSource(Map source, XContentType content /** * Sets the document source to index. *

- * Note, its preferable to either set it using {@link #setSource(org.elasticsearch.common.xcontent.XContentBuilder)} + * Note, its preferable to either set it using {@link #setSource(org.elasticsearch.xcontent.XContentBuilder)} * or using the {@link #setSource(byte[], XContentType)}. */ public IndexRequestBuilder setSource(String source, XContentType xContentType) { diff --git a/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index e837f59ce3ff3..6c75374d51012 100644 --- a/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.index; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportSingleItemBulkWriteAction; import org.elasticsearch.action.support.ActionFilters; @@ -27,7 +28,7 @@ * Deprecated use TransportBulkAction with a single item instead */ @Deprecated -public class TransportIndexAction extends TransportSingleItemBulkWriteAction { +public class TransportIndexAction extends TransportSingleItemBulkWriteAction { @Inject public TransportIndexAction(ActionFilters actionFilters, TransportService transportService, TransportBulkAction bulkAction) { diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java index 10aff29b8e706..bc9c88a706f30 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java @@ -13,9 +13,9 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; 
-import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser.Token; @@ -29,9 +29,9 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -public class GetPipelineResponse extends ActionResponse implements StatusToXContentObject { +public class GetPipelineResponse extends ActionResponse implements ToXContentObject { - private List pipelines; + private final List pipelines; private final boolean summary; public GetPipelineResponse(StreamInput in) throws IOException { @@ -76,7 +76,6 @@ public boolean isSummary() { return summary; } - @Override public RestStatus status() { return isFound() ? RestStatus.OK : RestStatus.NOT_FOUND; } diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java index 466973af37d8a..0a7b53ea8b9c4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java @@ -11,11 +11,11 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -25,7 +25,7 @@ import static 
org.elasticsearch.rest.RestStatus.OK; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class ClearScrollResponse extends ActionResponse implements StatusToXContentObject { +public class ClearScrollResponse extends ActionResponse implements ToXContentObject { private static final ParseField SUCCEEDED = new ParseField("succeeded"); private static final ParseField NUMFREED = new ParseField("num_freed"); @@ -69,7 +69,6 @@ public int getNumFreed() { return numFreed; } - @Override public RestStatus status() { return numFreed == 0 ? NOT_FOUND : OK; } diff --git a/server/src/main/java/org/elasticsearch/action/search/RestClosePointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/RestClosePointInTimeAction.java index 320d2b81ca413..a9da16bd62026 100644 --- a/server/src/main/java/org/elasticsearch/action/search/RestClosePointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/RestClosePointInTimeAction.java @@ -13,7 +13,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -40,6 +40,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC try (XContentParser parser = request.contentOrSourceParamParser()) { clearRequest = ClosePointInTimeRequest.fromXContent(parser); } - return channel -> client.execute(ClosePointInTimeAction.INSTANCE, clearRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute( + ClosePointInTimeAction.INSTANCE, + clearRequest, + new RestToXContentListener<>(channel, ClosePointInTimeResponse::status) + ); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java 
b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index f9f5f43494711..ba785fd4d9637 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -471,9 +471,6 @@ public static class Clusters implements ToXContentFragment, Writeable { private final int total; private final int successful; // not used for minimize_roundtrips=true; dynamically determined from clusterInfo map private final int skipped; // not used for minimize_roundtrips=true; dynamically determined from clusterInfo map - private final int running; // not used for minimize_roundtrips=true; dynamically determined from clusterInfo map - private final int partial; // not used for minimize_roundtrips=true; dynamically determined from clusterInfo map - private final int failed; // not used for minimize_roundtrips=true; dynamically determined from clusterInfo map // key to map is clusterAlias on the primary querying cluster of a CCS minimize_roundtrips=true query // the Map itself is immutable after construction - all Clusters will be accounted for at the start of the search @@ -503,6 +500,8 @@ public Clusters( assert remoteClusterIndices.size() > 0 : "At least one remote cluster must be passed into this Cluster constructor"; this.total = remoteClusterIndices.size() + (localIndices == null ? 
0 : 1); assert total >= 1 : "No local indices or remote clusters passed in"; + this.successful = 0; // calculated from clusterInfo map for minimize_roundtrips + this.skipped = 0; // calculated from clusterInfo map for minimize_roundtrips this.ccsMinimizeRoundtrips = ccsMinimizeRoundtrips; Map> m = new HashMap<>(); if (localIndices != null) { @@ -517,11 +516,6 @@ public Clusters( m.put(clusterAlias, new AtomicReference<>(c)); } this.clusterInfo = Collections.unmodifiableMap(m); - this.successful = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.SUCCESSFUL); - this.skipped = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.SKIPPED); - this.running = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.RUNNING); - this.partial = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.PARTIAL); - this.failed = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.FAILED); } /** @@ -539,36 +533,39 @@ public Clusters(int total, int successful, int skipped) { this.total = total; this.successful = successful; this.skipped = skipped; - this.running = 0; - this.partial = 0; - this.failed = 0; this.ccsMinimizeRoundtrips = false; this.clusterInfo = Collections.emptyMap(); // will never be used if created from this constructor } public Clusters(StreamInput in) throws IOException { this.total = in.readVInt(); - this.successful = in.readVInt(); - this.skipped = in.readVInt(); + int successfulTemp = in.readVInt(); + int skippedTemp = in.readVInt(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_053)) { List clusterList = in.readCollectionAsList(Cluster::new); if (clusterList.isEmpty()) { this.clusterInfo = Collections.emptyMap(); + this.successful = successfulTemp; + this.skipped = skippedTemp; } else { Map> m = new HashMap<>(); clusterList.forEach(c -> m.put(c.getClusterAlias(), new AtomicReference<>(c))); this.clusterInfo = 
Collections.unmodifiableMap(m); + this.successful = getClusterStateCount(Cluster.Status.SUCCESSFUL); + this.skipped = getClusterStateCount(Cluster.Status.SKIPPED); } } else { + this.successful = successfulTemp; + this.skipped = skippedTemp; this.clusterInfo = Collections.emptyMap(); } - this.running = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.RUNNING); - this.partial = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.PARTIAL); - this.failed = determineCountFromClusterInfo(cluster -> cluster.getStatus() == Cluster.Status.FAILED); + int running = getClusterStateCount(Cluster.Status.RUNNING); + int partial = getClusterStateCount(Cluster.Status.PARTIAL); + int failed = getClusterStateCount(Cluster.Status.FAILED); this.ccsMinimizeRoundtrips = false; assert total >= 0 : "total is negative: " + total; - assert total >= successful + skipped + running + partial + failed - : "successful + skipped + running + partial + failed is larger than total. total: " + assert total == successful + skipped + running + partial + failed + : "successful + skipped + running + partial + failed is not equal to total. 
total: " + total + " successful: " + successful @@ -586,11 +583,8 @@ private Clusters(Map> clusterInfoMap) { assert clusterInfoMap.size() > 0 : "this constructor should not be called with an empty Cluster info map"; this.total = clusterInfoMap.size(); this.clusterInfo = clusterInfoMap; - this.successful = 0; // calculated from clusterInfo map for minimize_roundtrips - this.skipped = 0; // calculated from clusterInfo map for minimize_roundtrips - this.running = 0; // calculated from clusterInfo map for minimize_roundtrips - this.partial = 0; // calculated from clusterInfo map for minimize_roundtrips - this.failed = 0; // calculated from clusterInfo map for minimize_roundtrips + this.successful = getClusterStateCount(Cluster.Status.SUCCESSFUL); + this.skipped = getClusterStateCount(Cluster.Status.SKIPPED); // should only be called if "details" section of fromXContent is present (for ccsMinimizeRoundtrips) this.ccsMinimizeRoundtrips = true; } @@ -705,11 +699,9 @@ public int getTotal() { public int getClusterStateCount(Cluster.Status status) { if (clusterInfo.isEmpty()) { return switch (status) { - case RUNNING -> running; case SUCCESSFUL -> successful; - case PARTIAL -> partial; case SKIPPED -> skipped; - case FAILED -> failed; + default -> 0; }; } else { return determineCountFromClusterInfo(cluster -> cluster.getStatus() == status); @@ -752,16 +744,23 @@ public boolean equals(Object o) { } Clusters clusters = (Clusters) o; return total == clusters.total - && successful == clusters.successful - && skipped == clusters.skipped - && running == clusters.running - && partial == clusters.partial - && failed == clusters.failed; + && getClusterStateCount(Cluster.Status.SUCCESSFUL) == clusters.getClusterStateCount(Cluster.Status.SUCCESSFUL) + && getClusterStateCount(Cluster.Status.SKIPPED) == clusters.getClusterStateCount(Cluster.Status.SKIPPED) + && getClusterStateCount(Cluster.Status.RUNNING) == clusters.getClusterStateCount(Cluster.Status.RUNNING) + && 
getClusterStateCount(Cluster.Status.PARTIAL) == clusters.getClusterStateCount(Cluster.Status.PARTIAL) + && getClusterStateCount(Cluster.Status.FAILED) == clusters.getClusterStateCount(Cluster.Status.FAILED); } @Override public int hashCode() { - return Objects.hash(total, successful, skipped, running, partial, failed); + return Objects.hash( + total, + getClusterStateCount(Cluster.Status.SUCCESSFUL), + getClusterStateCount(Cluster.Status.SKIPPED), + getClusterStateCount(Cluster.Status.RUNNING), + getClusterStateCount(Cluster.Status.PARTIAL), + getClusterStateCount(Cluster.Status.FAILED) + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java index 4d3b9b0c15ff0..cb5a9ce3db353 100644 --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; @@ -124,4 +125,14 @@ public void onFailure(Exception e) { } } } + + /** + * A method to use as a placeholder in implementations of {@link TransportAction} which only ever run on the local node, and therefore + * do not need to serialize or deserialize any messages. See also {@link Writeable.Reader#localOnly()}. 
+ */ + // TODO remove this when https://github.com/elastic/elasticsearch/issues/100111 is resolved + public static T localOnly() { + assert false : "local-only action"; + throw new UnsupportedOperationException("local-only action"); + } } diff --git a/server/src/main/java/org/elasticsearch/action/synonyms/SynonymUpdateResponse.java b/server/src/main/java/org/elasticsearch/action/synonyms/SynonymUpdateResponse.java index fd464d787db85..7bbddfca79075 100644 --- a/server/src/main/java/org/elasticsearch/action/synonyms/SynonymUpdateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/synonyms/SynonymUpdateResponse.java @@ -12,17 +12,17 @@ import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzersResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.synonyms.SynonymsManagementAPIService.SynonymsReloadResult; import org.elasticsearch.synonyms.SynonymsManagementAPIService.UpdateSynonymsResultStatus; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Locale; import java.util.Objects; -public class SynonymUpdateResponse extends ActionResponse implements StatusToXContentObject { +public class SynonymUpdateResponse extends ActionResponse implements ToXContentObject { private final UpdateSynonymsResultStatus updateStatus; private final ReloadAnalyzersResponse reloadAnalyzersResponse; @@ -63,7 +63,6 @@ public void writeTo(StreamOutput out) throws IOException { reloadAnalyzersResponse.writeTo(out); } - @Override public RestStatus status() { return switch (updateStatus) { case CREATED -> RestStatus.CREATED; diff --git a/server/src/main/java/org/elasticsearch/client/internal/Client.java b/server/src/main/java/org/elasticsearch/client/internal/Client.java index 
925c5af303470..1065efb857fe7 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/Client.java +++ b/server/src/main/java/org/elasticsearch/client/internal/Client.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; @@ -30,7 +31,6 @@ import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollRequestBuilder; import org.elasticsearch.action.search.ClearScrollResponse; @@ -93,7 +93,7 @@ public interface Client extends ElasticsearchClient, Releasable { * @param request The index request * @return The result future */ - ActionFuture index(IndexRequest request); + ActionFuture index(IndexRequest request); /** * Index a document associated with a given index. @@ -103,7 +103,7 @@ public interface Client extends ElasticsearchClient, Releasable { * @param request The index request * @param listener A listener to be notified with a result */ - void index(IndexRequest request, ActionListener listener); + void index(IndexRequest request, ActionListener listener); /** * Index a document associated with a given index. 
diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java index 7a98c65d51247..5ac1ae6a76611 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequest; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequestBuilder; @@ -265,7 +266,6 @@ import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.ingest.DeletePipelineAction; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder; @@ -390,12 +390,12 @@ protected abstract index(final IndexRequest request) { + public ActionFuture index(final IndexRequest request) { return execute(IndexAction.INSTANCE, request); } @Override - public void index(final IndexRequest request, final ActionListener listener) { + public void index(final IndexRequest request, final ActionListener listener) { execute(IndexAction.INSTANCE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 01008fa663ebd..12f49b2651b94 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -59,6 +59,8 @@ public class IndexNameExpressionResolver { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class); + private static final Predicate ALWAYS_TRUE = s -> true; + public static final String EXCLUDED_DATA_STREAMS_KEY = "es.excluded_ds"; public static final Version SYSTEM_INDEX_ENFORCEMENT_VERSION = Version.V_8_0_0; public static final IndexVersion SYSTEM_INDEX_ENFORCEMENT_INDEX_VERSION = IndexVersion.V_8_0_0; @@ -100,7 +102,7 @@ public String[] concreteIndexNamesWithSystemIndexAccess(ClusterState state, Indi false, request.includeDataStreams(), SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY, - name -> true, + ALWAYS_TRUE, this.getNetNewSystemIndexPredicate() ); return concreteIndexNames(context, request.indices()); @@ -395,33 +397,45 @@ Index[] concreteIndices(Context context, String... 
indexExpressions) { } private void checkSystemIndexAccess(Context context, Set concreteIndices) { - final Metadata metadata = context.getState().metadata(); - final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate().negate(); - final List systemIndicesThatShouldNotBeAccessed = concreteIndices.stream() - .map(metadata::index) - .filter(IndexMetadata::isSystem) - .filter(idxMetadata -> systemIndexAccessPredicate.test(idxMetadata.getIndex().getName())) - .toList(); - - if (systemIndicesThatShouldNotBeAccessed.isEmpty()) { + final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); + if (systemIndexAccessPredicate == ALWAYS_TRUE) { return; } + doCheckSystemIndexAccess(context, concreteIndices, systemIndexAccessPredicate); + } + private void doCheckSystemIndexAccess(Context context, Set concreteIndices, Predicate systemIndexAccessPredicate) { + final Metadata metadata = context.getState().metadata(); final List resolvedSystemIndices = new ArrayList<>(); final List resolvedNetNewSystemIndices = new ArrayList<>(); final Set resolvedSystemDataStreams = new HashSet<>(); final SortedMap indicesLookup = metadata.getIndicesLookup(); - for (IndexMetadata idxMetadata : systemIndicesThatShouldNotBeAccessed) { - IndexAbstraction abstraction = indicesLookup.get(idxMetadata.getIndex().getName()); - if (abstraction.getParentDataStream() != null) { - resolvedSystemDataStreams.add(abstraction.getParentDataStream().getName()); - } else if (systemIndices.isNetNewSystemIndex(idxMetadata.getIndex().getName())) { - resolvedNetNewSystemIndices.add(idxMetadata.getIndex().getName()); - } else { - resolvedSystemIndices.add(idxMetadata.getIndex().getName()); + boolean matchedIndex = false; + for (Index concreteIndex : concreteIndices) { + IndexMetadata idxMetadata = metadata.index(concreteIndex); + String name = concreteIndex.getName(); + if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { + matchedIndex = true; 
+ IndexAbstraction indexAbstraction = indicesLookup.get(name); + if (indexAbstraction.getParentDataStream() != null) { + resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); + } else if (systemIndices.isNetNewSystemIndex(name)) { + resolvedNetNewSystemIndices.add(name); + } else { + resolvedSystemIndices.add(name); + } } } + if (matchedIndex) { + handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices); + } + } + private void handleMatchedSystemIndices( + List resolvedSystemIndices, + Set resolvedSystemDataStreams, + List resolvedNetNewSystemIndices + ) { if (resolvedSystemIndices.isEmpty() == false) { Collections.sort(resolvedSystemIndices); deprecationLogger.warn( @@ -938,7 +952,7 @@ public Predicate getSystemIndexAccessPredicate() { } else if (systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY) { systemIndexAccessLevelPredicate = getNetNewSystemIndexPredicate(); } else if (systemIndexAccessLevel == SystemIndexAccessLevel.ALL) { - systemIndexAccessLevelPredicate = s -> true; + systemIndexAccessLevelPredicate = ALWAYS_TRUE; } else { // everything other than allowed should be included in the deprecation message systemIndexAccessLevelPredicate = systemIndices.getProductSystemIndexNamePredicate(threadContext); @@ -968,7 +982,7 @@ public static class Context { private final Predicate netNewSystemIndexPredicate; Context(ClusterState state, IndicesOptions options, SystemIndexAccessLevel systemIndexAccessLevel) { - this(state, options, systemIndexAccessLevel, s -> true, s -> false); + this(state, options, systemIndexAccessLevel, ALWAYS_TRUE, s -> false); } Context( diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index b51364ebc2c84..f45f16db502e9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -26,6 +26,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; @@ -125,16 +126,32 @@ public GroupShardsIterator searchShards( nodeCounts ); if (iterator != null) { - var shardsThatCanHandleSearches = iterator.getShardRoutings() - .stream() - .filter(shardRouting -> canSearchShard(shardRouting, clusterState)) - .toList(); + final List shardsThatCanHandleSearches; + if (isStateless) { + shardsThatCanHandleSearches = statelessShardsThatHandleSearches(clusterState, iterator); + } else { + shardsThatCanHandleSearches = statefulShardsThatHandleSearches(iterator); + } set.add(new PlainShardIterator(iterator.shardId(), shardsThatCanHandleSearches)); } } return GroupShardsIterator.sortAndCreate(new ArrayList<>(set)); } + private static List statefulShardsThatHandleSearches(ShardIterator iterator) { + final List shardsThatCanHandleSearches = new ArrayList<>(iterator.size()); + for (ShardRouting shardRouting : iterator) { + if (shardRouting.isSearchable()) { + shardsThatCanHandleSearches.add(shardRouting); + } + } + return shardsThatCanHandleSearches; + } + + private static List statelessShardsThatHandleSearches(ClusterState clusterState, ShardIterator iterator) { + return iterator.getShardRoutings().stream().filter(shardRouting -> canSearchShard(shardRouting, clusterState)).toList(); + } + public static ShardIterator getShards(ClusterState clusterState, ShardId shardId) { final IndexShardRoutingTable shard = clusterState.routingTable().shardRoutingTable(shardId); return shard.activeInitializingShardsRandomIt(); @@ -177,7 +194,7 @@ private ShardIterator preferenceActiveShardIterator( @Nullable Map nodeCounts ) { if (preference == null || preference.isEmpty()) { - return shardRoutings(indexShard, nodes, collectorService, nodeCounts); + return 
shardRoutings(indexShard, collectorService, nodeCounts); } if (preference.charAt(0) == '_') { Preference preferenceType = Preference.parse(preference); @@ -204,7 +221,7 @@ private ShardIterator preferenceActiveShardIterator( } // no more preference if (index == -1 || index == preference.length() - 1) { - return shardRoutings(indexShard, nodes, collectorService, nodeCounts); + return shardRoutings(indexShard, collectorService, nodeCounts); } else { // update the preference and continue preference = preference.substring(index + 1); @@ -237,7 +254,6 @@ private ShardIterator preferenceActiveShardIterator( private ShardIterator shardRoutings( IndexShardRoutingTable indexShard, - DiscoveryNodes nodes, @Nullable ResponseCollectorService collectorService, @Nullable Map nodeCounts ) { diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java index c87d9e46f9660..ce4d249b2e112 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java @@ -37,28 +37,39 @@ public interface BlobContainer { /** * Tests whether a blob with the given blob name exists in the container. * - * @param blobName - * The name of the blob whose existence is to be determined. - * @return {@code true} if a blob exists in the {@link BlobContainer} with the given name, and {@code false} otherwise. + * @param purpose The purpose of the operation + * @param blobName The name of the blob whose existence is to be determined. + * @return {@code true} if a blob exists in the {@link BlobContainer} with the given name, and {@code false} otherwise. 
*/ - boolean blobExists(String blobName) throws IOException; + boolean blobExists(OperationPurpose purpose, String blobName) throws IOException; + + @Deprecated(forRemoval = true) + default boolean blobExists(String blobName) throws IOException { + return blobExists(OperationPurpose.SNAPSHOT, blobName); + } /** * Creates a new {@link InputStream} for the given blob name. * - * @param blobName - * The name of the blob to get an {@link InputStream} for. - * @return The {@code InputStream} to read the blob. - * @throws NoSuchFileException if the blob does not exist - * @throws IOException if the blob can not be read. + * @param purpose The purpose of the operation + * @param blobName The name of the blob to get an {@link InputStream} for. + * @return The {@code InputStream} to read the blob. + * @throws NoSuchFileException if the blob does not exist + * @throws IOException if the blob can not be read. */ - InputStream readBlob(String blobName) throws IOException; + InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException; + + @Deprecated(forRemoval = true) + default InputStream readBlob(String blobName) throws IOException { + return readBlob(OperationPurpose.SNAPSHOT, blobName); + } /** * Creates a new {@link InputStream} that can be used to read the given blob starting from * a specific {@code position} in the blob. The {@code length} is an indication of the * number of bytes that are expected to be read from the {@link InputStream}. * + * @param purpose The purpose of the operation * @param blobName The name of the blob to get an {@link InputStream} for. * @param position The position in the blob where the next byte will be read. * @param length An indication of the number of bytes to be read. @@ -66,10 +77,15 @@ public interface BlobContainer { * @throws NoSuchFileException if the blob does not exist * @throws IOException if the blob can not be read. 
*/ - InputStream readBlob(String blobName, long position, long length) throws IOException; + InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException; + + @Deprecated(forRemoval = true) + default InputStream readBlob(String blobName, long position, long length) throws IOException { + return readBlob(OperationPurpose.SNAPSHOT, blobName, position, length); + } /** - * Provides a hint to clients for a suitable length to use with {@link BlobContainer#readBlob(String, long, long)}. + * Provides a hint to clients for a suitable length to use with {@link BlobContainer#readBlob(OperationPurpose, String, long, long)}. * * Some blob containers have nontrivial costs attached to each readBlob call, so it is a good idea for consumers to speculatively * request more data than they need right now and to re-use this stream for future needs if possible. @@ -91,34 +107,41 @@ default long readBlobPreferredLength() { * This method assumes the container does not already contain a blob of the same blobName. If a blob by the * same name already exists, the operation will fail and an {@link IOException} will be thrown. * - * @param blobName - * The name of the blob to write the contents of the input stream to. - * @param inputStream - * The input stream from which to retrieve the bytes to write to the blob. - * @param blobSize - * The size of the blob to be written, in bytes. It is implementation dependent whether - * this value is used in writing the blob to the repository. - * @param failIfAlreadyExists - * whether to throw a FileAlreadyExistsException if the given blob already exists - * @throws FileAlreadyExistsException if failIfAlreadyExists is true and a blob by the same name already exists - * @throws IOException if the input stream could not be read, or the target blob could not be written to. + * @param purpose The purpose of the operation + * @param blobName The name of the blob to write the contents of the input stream to. 
+ * @param inputStream The input stream from which to retrieve the bytes to write to the blob. + * @param blobSize The size of the blob to be written, in bytes. It is implementation dependent whether + * this value is used in writing the blob to the repository. + * @param failIfAlreadyExists whether to throw a FileAlreadyExistsException if the given blob already exists + * @throws FileAlreadyExistsException if failIfAlreadyExists is true and a blob by the same name already exists + * @throws IOException if the input stream could not be read, or the target blob could not be written to. */ - void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException; + void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException; + + @Deprecated(forRemoval = true) + default void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + writeBlob(OperationPurpose.SNAPSHOT, blobName, inputStream, blobSize, failIfAlreadyExists); + } /** * Reads blob content from a {@link BytesReference} and writes it to the container in a new blob with the given name. * - * @param blobName - * The name of the blob to write the contents of the input stream to. - * @param bytes - * The bytes to write - * @param failIfAlreadyExists - * whether to throw a FileAlreadyExistsException if the given blob already exists - * @throws FileAlreadyExistsException if failIfAlreadyExists is true and a blob by the same name already exists - * @throws IOException if the input stream could not be read, or the target blob could not be written to. + * @param purpose The purpose of the operation + * @param blobName The name of the blob to write the contents of the input stream to. 
+ * @param bytes The bytes to write + * @param failIfAlreadyExists whether to throw a FileAlreadyExistsException if the given blob already exists + * @throws FileAlreadyExistsException if failIfAlreadyExists is true and a blob by the same name already exists + * @throws IOException if the input stream could not be read, or the target blob could not be written to. */ + default void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + writeBlob(purpose, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); + } + + @Deprecated(forRemoval = true) default void writeBlob(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { - writeBlob(blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); + writeBlob(OperationPurpose.SNAPSHOT, blobName, bytes, failIfAlreadyExists); } /** @@ -127,45 +150,76 @@ default void writeBlob(String blobName, BytesReference bytes, boolean failIfAlre * This method is only used for streaming serialization of repository metadata that is known to be of limited size * at any point in time and across all concurrent invocations of this method. 
* + * @param purpose The purpose of the operation * @param blobName the name of the blob to write * @param failIfAlreadyExists whether to throw a FileAlreadyExistsException if the given blob already exists * @param atomic whether the write should be atomic in case the implementation supports it * @param writer consumer for an output stream that will write the blob contents to the stream */ - void writeMetadataBlob(String blobName, boolean failIfAlreadyExists, boolean atomic, CheckedConsumer writer) - throws IOException; + void writeMetadataBlob( + OperationPurpose purpose, + String blobName, + boolean failIfAlreadyExists, + boolean atomic, + CheckedConsumer writer + ) throws IOException; + + @Deprecated(forRemoval = true) + default void writeMetadataBlob( + String blobName, + boolean failIfAlreadyExists, + boolean atomic, + CheckedConsumer writer + ) throws IOException { + writeMetadataBlob(OperationPurpose.SNAPSHOT, blobName, failIfAlreadyExists, atomic, writer); + } /** * Reads blob content from a {@link BytesReference} and writes it to the container in a new blob with the given name, * using an atomic write operation if the implementation supports it. * - * @param blobName - * The name of the blob to write the contents of the input stream to. - * @param bytes - * The bytes to write - * @param failIfAlreadyExists - * whether to throw a FileAlreadyExistsException if the given blob already exists - * @throws FileAlreadyExistsException if failIfAlreadyExists is true and a blob by the same name already exists - * @throws IOException if the input stream could not be read, or the target blob could not be written to. + * @param purpose The purpose of the operation + * @param blobName The name of the blob to write the contents of the input stream to. 
+ * @param bytes The bytes to write + * @param failIfAlreadyExists whether to throw a FileAlreadyExistsException if the given blob already exists + * @throws FileAlreadyExistsException if failIfAlreadyExists is true and a blob by the same name already exists + * @throws IOException if the input stream could not be read, or the target blob could not be written to. */ - void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException; + void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException; + + @Deprecated(forRemoval = true) + default void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + writeBlobAtomic(OperationPurpose.SNAPSHOT, blobName, bytes, failIfAlreadyExists); + } /** * Deletes this container and all its contents from the repository. * + * @param purpose The purpose of the operation * @return delete result * @throws IOException on failure */ - DeleteResult delete() throws IOException; + DeleteResult delete(OperationPurpose purpose) throws IOException; + + @Deprecated(forRemoval = true) + default DeleteResult delete() throws IOException { + return delete(OperationPurpose.SNAPSHOT); + } /** * Deletes the blobs with given names. This method will not throw an exception * when one or multiple of the given blobs don't exist and simply ignore this case. * - * @param blobNames the names of the blobs to delete - * @throws IOException if a subset of blob exists but could not be deleted. + * @param purpose The purpose of the operation + * @param blobNames the names of the blobs to delete + * @throws IOException if a subset of blob exists but could not be deleted. 
*/ - void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException; + void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException; + + @Deprecated(forRemoval = true) + default void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { + deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNames); + } /** * Lists all blobs in the container. @@ -174,33 +228,50 @@ void writeMetadataBlob(String blobName, boolean failIfAlreadyExists, boolean ato * the values are {@link BlobMetadata}, containing basic information about each blob. * @throws IOException if there were any failures in reading from the blob container. */ - Map listBlobs() throws IOException; + Map listBlobs(OperationPurpose purpose) throws IOException; + + @Deprecated(forRemoval = true) + default Map listBlobs() throws IOException { + return listBlobs(OperationPurpose.SNAPSHOT); + } /** * Lists all child containers under this container. A child container is defined as a container whose {@link #path()} method returns * a path that has this containers {@link #path()} return as its prefix and has one more path element than the current * container's path. * + * @param purpose The purpose of the operation * @return Map of name of the child container to child container * @throws IOException on failure to list child containers */ - Map children() throws IOException; + Map children(OperationPurpose purpose) throws IOException; + + @Deprecated(forRemoval = true) + default Map children() throws IOException { + return children(OperationPurpose.SNAPSHOT); + } /** * Lists all blobs in the container that match the specified prefix. * - * @param blobNamePrefix - * The prefix to match against blob names in the container. - * @return A map of the matching blobs in the container. The keys in the map are the names of the blobs - * and the values are {@link BlobMetadata}, containing basic information about each blob. 
- * @throws IOException if there were any failures in reading from the blob container. + * @param purpose The purpose of the operation + * @param blobNamePrefix The prefix to match against blob names in the container. + * @return A map of the matching blobs in the container. The keys in the map are the names of the blobs + * and the values are {@link BlobMetadata}, containing basic information about each blob. + * @throws IOException if there were any failures in reading from the blob container. */ - Map listBlobsByPrefix(String blobNamePrefix) throws IOException; + Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) throws IOException; + + @Deprecated(forRemoval = true) + default Map listBlobsByPrefix(String blobNamePrefix) throws IOException { + return listBlobsByPrefix(OperationPurpose.SNAPSHOT, blobNamePrefix); + } /** * Atomically sets the value stored at the given key to {@code updated} if the {@code current value == expected}. * Keys not yet used start at initial value 0. Returns the current value (before it was updated). * + * @param purpose The purpose of the operation * @param key key of the value to update * @param expected the expected value * @param updated the new value @@ -208,24 +279,43 @@ void writeMetadataBlob(String blobName, boolean failIfAlreadyExists, boolean ato * {@link OptionalBytesReference#MISSING} if the value could not be read due to concurrent activity. */ void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, ActionListener listener ); + @Deprecated(forRemoval = true) + default void compareAndExchangeRegister( + String key, + BytesReference expected, + BytesReference updated, + ActionListener listener + ) { + compareAndExchangeRegister(OperationPurpose.SNAPSHOT, key, expected, updated, listener); + } + /** * Atomically sets the value stored at the given key to {@code updated} if the {@code current value == expected}. 
* Keys not yet used start at initial value 0. * + * @param purpose * @param key key of the value to update * @param expected the expected value * @param updated the new value * @param listener a listener which is completed with {@link Boolean#TRUE} if successful, {@link Boolean#FALSE} if the expected value * did not match the updated value or the value could not be read due to concurrent activity */ - default void compareAndSetRegister(String key, BytesReference expected, BytesReference updated, ActionListener listener) { + default void compareAndSetRegister( + OperationPurpose purpose, + String key, + BytesReference expected, + BytesReference updated, + ActionListener listener + ) { compareAndExchangeRegister( + purpose, key, expected, updated, @@ -233,16 +323,27 @@ default void compareAndSetRegister(String key, BytesReference expected, BytesRef ); } + @Deprecated(forRemoval = true) + default void compareAndSetRegister(String key, BytesReference expected, BytesReference updated, ActionListener listener) { + compareAndSetRegister(OperationPurpose.SNAPSHOT, key, expected, updated, listener); + } + /** * Gets the value set by {@link #compareAndSetRegister} or {@link #compareAndExchangeRegister} for a given key. * If a key has not yet been used, the initial value is an empty {@link BytesReference}. * + * @param purpose The purpose of the operation * @param key key of the value to get * @param listener a listener, completed with the value read from the register or {@code OptionalBytesReference#MISSING} if the value * could not be read due to concurrent activity. 
*/ + default void getRegister(OperationPurpose purpose, String key, ActionListener listener) { + compareAndExchangeRegister(purpose, key, BytesArray.EMPTY, BytesArray.EMPTY, listener); + } + + @Deprecated(forRemoval = true) default void getRegister(String key, ActionListener listener) { - compareAndExchangeRegister(key, BytesArray.EMPTY, BytesArray.EMPTY, listener); + getRegister(OperationPurpose.SNAPSHOT, key, listener); } } diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java index d66b8b970437e..4b602822f5f2f 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java @@ -27,7 +27,22 @@ public interface BlobStore extends Closeable { * Delete all the provided blobs from the blob store. Each blob could belong to a different {@code BlobContainer} * @param blobNames the blobs to be deleted */ - void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException; + @Deprecated(forRemoval = true) + default void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { + deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNames); + } + + // TODO: Remove the default implementation and require each blob store to implement this method. Once it's done, remove the + // the above overload version that does not take the Purpose parameter. + /** + * Delete all the provided blobs from the blob store. 
Each blob could belong to a different {@code BlobContainer} + * + * @param purpose the purpose of the delete operation + * @param blobNames the blobs to be deleted + */ + default void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + throw new UnsupportedOperationException(); + } /** * Returns statistics on the count of operations that have been performed on this blob store diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java b/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java new file mode 100644 index 0000000000000..2cfa309c1f7c1 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.blobstore; + +/** + * The purpose of an operation against the blobstore. For example, it can be useful for stats collection + * as well as other things that requires further differentiation for the same blob operation. 
+ */ +public enum OperationPurpose { + SNAPSHOT("Snapshot"), + CLUSTER_STATE("ClusterState"), + INDICES("Indices"), + TRANSLOG("Translog"); + + private final String key; + + OperationPurpose(String key) { + this.key = key; + } + + public String getKey() { + return key; + } +} diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index 838d0e3f4d08c..f3857fe60b08d 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.BlobContainerUtils; @@ -84,12 +85,12 @@ public FsBlobContainer(FsBlobStore blobStore, BlobPath blobPath, Path path) { } @Override - public Map listBlobs() throws IOException { - return listBlobsByPrefix(null); + public Map listBlobs(OperationPurpose purpose) throws IOException { + return listBlobsByPrefix(purpose, null); } @Override - public Map children() throws IOException { + public Map children(OperationPurpose purpose) throws IOException { Map builder = new HashMap<>(); try (DirectoryStream stream = Files.newDirectoryStream(path)) { for (Path file : stream) { @@ -103,7 +104,7 @@ public Map children() throws IOException { } @Override - public Map listBlobsByPrefix(String blobNamePrefix) throws IOException { + public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) throws IOException { Map builder = new HashMap<>(); blobNamePrefix = blobNamePrefix == null ? 
"" : blobNamePrefix; @@ -157,7 +158,7 @@ public void close() {} } @Override - public DeleteResult delete() throws IOException { + public DeleteResult delete(OperationPurpose purpose) throws IOException { final AtomicLong filesDeleted = new AtomicLong(0L); final AtomicLong bytesDeleted = new AtomicLong(0L); Files.walkFileTree(path, new SimpleFileVisitor<>() { @@ -180,17 +181,17 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { - blobStore.deleteBlobsIgnoringIfNotExists(Iterators.map(blobNames, blobName -> path.resolve(blobName).toString())); + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + blobStore.deleteBlobsIgnoringIfNotExists(purpose, Iterators.map(blobNames, blobName -> path.resolve(blobName).toString())); } @Override - public boolean blobExists(String blobName) { + public boolean blobExists(OperationPurpose purpose, String blobName) { return Files.exists(path.resolve(blobName)); } @Override - public InputStream readBlob(String name) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String name) throws IOException { final Path resolvedPath = path.resolve(name); try { return Files.newInputStream(resolvedPath); @@ -200,7 +201,7 @@ public InputStream readBlob(String name) throws IOException { } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { final SeekableByteChannel channel = Files.newByteChannel(path.resolve(blobName)); if (position > 0L) { channel.position(position); @@ -216,7 +217,8 @@ public long readBlobPreferredLength() { } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException 
{ + public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { final Path file = path.resolve(blobName); try { writeToPath(inputStream, file, blobSize); @@ -224,14 +226,14 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b if (failIfAlreadyExists) { throw faee; } - deleteBlobsIgnoringIfNotExists(Iterators.single(blobName)); + deleteBlobsIgnoringIfNotExists(purpose, Iterators.single(blobName)); writeToPath(inputStream, file, blobSize); } IOUtils.fsync(path, true); } @Override - public void writeBlob(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { final Path file = path.resolve(blobName); try { writeToPath(bytes, file); @@ -239,7 +241,7 @@ public void writeBlob(String blobName, BytesReference bytes, boolean failIfAlrea if (failIfAlreadyExists) { throw faee; } - deleteBlobsIgnoringIfNotExists(Iterators.single(blobName)); + deleteBlobsIgnoringIfNotExists(purpose, Iterators.single(blobName)); writeToPath(bytes, file); } IOUtils.fsync(path, true); @@ -247,6 +249,7 @@ public void writeBlob(String blobName, BytesReference bytes, boolean failIfAlrea @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -255,24 +258,28 @@ public void writeMetadataBlob( if (atomic) { final String tempBlob = tempBlobName(blobName); try { - writeToPath(tempBlob, true, writer); - moveBlobAtomic(tempBlob, blobName, failIfAlreadyExists); + writeToPath(purpose, tempBlob, true, writer); + moveBlobAtomic(purpose, tempBlob, blobName, failIfAlreadyExists); } catch (IOException ex) { try { - deleteBlobsIgnoringIfNotExists(Iterators.single(tempBlob)); + deleteBlobsIgnoringIfNotExists(purpose, Iterators.single(tempBlob)); 
} catch (IOException e) { ex.addSuppressed(e); } throw ex; } } else { - writeToPath(blobName, failIfAlreadyExists, writer); + writeToPath(purpose, blobName, failIfAlreadyExists, writer); } IOUtils.fsync(path, true); } - private void writeToPath(String blobName, boolean failIfAlreadyExists, CheckedConsumer writer) - throws IOException { + private void writeToPath( + OperationPurpose purpose, + String blobName, + boolean failIfAlreadyExists, + CheckedConsumer writer + ) throws IOException { final Path file = path.resolve(blobName); try { try (OutputStream out = blobOutputStream(file)) { @@ -282,7 +289,7 @@ private void writeToPath(String blobName, boolean failIfAlreadyExists, CheckedCo if (failIfAlreadyExists) { throw faee; } - deleteBlobsIgnoringIfNotExists(Iterators.single(blobName)); + deleteBlobsIgnoringIfNotExists(purpose, Iterators.single(blobName)); try (OutputStream out = blobOutputStream(file)) { writer.accept(out); } @@ -291,15 +298,16 @@ private void writeToPath(String blobName, boolean failIfAlreadyExists, CheckedCo } @Override - public void writeBlobAtomic(final String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + public void writeBlobAtomic(OperationPurpose purpose, final String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { final String tempBlob = tempBlobName(blobName); final Path tempBlobPath = path.resolve(tempBlob); try { writeToPath(bytes, tempBlobPath); - moveBlobAtomic(tempBlob, blobName, failIfAlreadyExists); + moveBlobAtomic(purpose, tempBlob, blobName, failIfAlreadyExists); } catch (IOException ex) { try { - deleteBlobsIgnoringIfNotExists(Iterators.single(tempBlob)); + deleteBlobsIgnoringIfNotExists(purpose, Iterators.single(tempBlob)); } catch (IOException e) { ex.addSuppressed(e); } @@ -328,8 +336,12 @@ private void writeToPath(InputStream inputStream, Path tempBlobPath, long blobSi IOUtils.fsync(tempBlobPath, false); } - public void moveBlobAtomic(final String 
sourceBlobName, final String targetBlobName, final boolean failIfAlreadyExists) - throws IOException { + public void moveBlobAtomic( + OperationPurpose purpose, + final String sourceBlobName, + final String targetBlobName, + final boolean failIfAlreadyExists + ) throws IOException { final Path sourceBlobPath = path.resolve(sourceBlobName); final Path targetBlobPath = path.resolve(targetBlobName); try { @@ -344,13 +356,14 @@ public void moveBlobAtomic(final String sourceBlobName, final String targetBlobN if (failIfAlreadyExists) { throw e; } - moveBlobNonAtomic(targetBlobName, sourceBlobPath, targetBlobPath, e); + moveBlobNonAtomic(purpose, targetBlobName, sourceBlobPath, targetBlobPath, e); } } - private void moveBlobNonAtomic(String targetBlobName, Path sourceBlobPath, Path targetBlobPath, IOException e) throws IOException { + private void moveBlobNonAtomic(OperationPurpose purpose, String targetBlobName, Path sourceBlobPath, Path targetBlobPath, IOException e) + throws IOException { try { - deleteBlobsIgnoringIfNotExists(Iterators.single(targetBlobName)); + deleteBlobsIgnoringIfNotExists(purpose, Iterators.single(targetBlobName)); Files.move(sourceBlobPath, targetBlobPath, StandardCopyOption.ATOMIC_MOVE); } catch (IOException ex) { ex.addSuppressed(e); @@ -378,6 +391,7 @@ private static OutputStream blobOutputStream(Path file) throws IOException { @Override @SuppressForbidden(reason = "write to channel that we have open for locking purposes already directly") public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java index 77553ea21c5bf..5d128d0178e9c 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java @@ -12,6 
+12,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.core.IOUtils; import java.io.IOException; @@ -64,7 +65,7 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { IOException ioe = null; long suppressedExceptions = 0; while (blobNames.hasNext()) { diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/support/FilterBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/support/FilterBlobContainer.java index b2a690c160900..d231e5046e1c8 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/support/FilterBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/support/FilterBlobContainer.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.CheckedConsumer; @@ -40,18 +41,18 @@ public BlobPath path() { } @Override - public boolean blobExists(String blobName) throws IOException { - return delegate.blobExists(blobName); + public boolean blobExists(OperationPurpose purpose, String blobName) throws IOException { + return delegate.blobExists(purpose, blobName); } @Override - public InputStream readBlob(String blobName) throws IOException { - return delegate.readBlob(blobName); + public InputStream readBlob(OperationPurpose purpose, String blobName) 
throws IOException { + return delegate.readBlob(purpose, blobName); } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { - return delegate.readBlob(blobName, position, length); + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + return delegate.readBlob(purpose, blobName, position, length); } @Override @@ -60,67 +61,77 @@ public long readBlobPreferredLength() { } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { - delegate.writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); + public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { + delegate.writeBlob(purpose, blobName, inputStream, blobSize, failIfAlreadyExists); } @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, CheckedConsumer writer ) throws IOException { - delegate.writeMetadataBlob(blobName, failIfAlreadyExists, atomic, writer); + delegate.writeMetadataBlob(purpose, blobName, failIfAlreadyExists, atomic, writer); } @Override - public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { - delegate.writeBlobAtomic(blobName, bytes, failIfAlreadyExists); + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + delegate.writeBlobAtomic(purpose, blobName, bytes, failIfAlreadyExists); } @Override - public DeleteResult delete() throws IOException { - return delegate.delete(); + public DeleteResult delete(OperationPurpose purpose) throws IOException { + return delegate.delete(purpose); } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) 
throws IOException { - delegate.deleteBlobsIgnoringIfNotExists(blobNames); + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + delegate.deleteBlobsIgnoringIfNotExists(purpose, blobNames); } @Override - public Map listBlobs() throws IOException { - return delegate.listBlobs(); + public Map listBlobs(OperationPurpose purpose) throws IOException { + return delegate.listBlobs(purpose); } @Override - public Map children() throws IOException { - return delegate.children().entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> wrapChild(e.getValue()))); + public Map children(OperationPurpose purpose) throws IOException { + return delegate.children(purpose).entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> wrapChild(e.getValue()))); } @Override - public Map listBlobsByPrefix(String blobNamePrefix) throws IOException { - return delegate.listBlobsByPrefix(blobNamePrefix); + public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) throws IOException { + return delegate.listBlobsByPrefix(purpose, blobNamePrefix); } @Override public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, ActionListener listener ) { - delegate.compareAndExchangeRegister(key, expected, updated, listener); + delegate.compareAndExchangeRegister(purpose, key, expected, updated, listener); } @Override - public void compareAndSetRegister(String key, BytesReference expected, BytesReference updated, ActionListener listener) { - delegate.compareAndSetRegister(key, expected, updated, listener); + public void compareAndSetRegister( + OperationPurpose purpose, + String key, + BytesReference expected, + BytesReference updated, + ActionListener listener + ) { + delegate.compareAndSetRegister(purpose, key, expected, updated, listener); } @Override - public void getRegister(String key, ActionListener listener) { - 
delegate.getRegister(key, listener); + public void getRegister(OperationPurpose purpose, String key, ActionListener listener) { + delegate.getRegister(purpose, key, listener); } } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java b/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java index b263edb112079..d3422c1b51a22 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java @@ -8,6 +8,8 @@ package org.elasticsearch.common.io.stream; +import org.elasticsearch.action.support.TransportAction; + import java.io.IOException; /** @@ -74,6 +76,13 @@ interface Reader { */ V read(StreamInput in) throws IOException; + /** + * A {@link Reader} which must never be called, for use in local-only transport actions. See also {@link TransportAction#localOnly}. + */ + // TODO remove this when https://github.com/elastic/elasticsearch/issues/100111 is resolved + static Reader localOnly() { + return in -> TransportAction.localOnly(); + } } } diff --git a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java index de061c7f314d6..91dbfc30123fe 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java @@ -160,7 +160,14 @@ public void writeTo(StreamOutput out) throws IOException { @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + startOffsets.ramBytesUsed() + bytes.ramBytesUsed(); + return BASE_RAM_BYTES_USED + bigArraysRamBytesUsed(); + } + + /** + * Memory used by the {@link BigArrays} portion of this {@link BytesRefArray}. 
+ */ + public long bigArraysRamBytesUsed() { + return startOffsets.ramBytesUsed() + bytes.ramBytesUsed(); } } diff --git a/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java b/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java index 262187c7879d9..de03455823103 100644 --- a/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java +++ b/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java @@ -17,21 +17,35 @@ */ public final class CachedSupplier implements Supplier { - private Supplier supplier; - private T result; - private boolean resultSet; + private volatile Supplier supplier; + private volatile T result; - public CachedSupplier(Supplier supplier) { + public static CachedSupplier wrap(Supplier supplier) { + if (supplier instanceof CachedSupplier c) { + // no need to wrap a cached supplier again + return c; + } + return new CachedSupplier<>(supplier); + } + + private CachedSupplier(Supplier supplier) { this.supplier = supplier; } @Override - public synchronized T get() { - if (resultSet == false) { - result = supplier.get(); - resultSet = true; + public T get() { + if (supplier == null) { + return result; } + initResult(); return result; } + private synchronized void initResult() { + if (supplier != null) { + result = supplier.get(); + supplier = null; + } + } + } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/StatusToXContentObject.java b/server/src/main/java/org/elasticsearch/common/xcontent/StatusToXContentObject.java deleted file mode 100644 index e0af9777fddfc..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/xcontent/StatusToXContentObject.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.common.xcontent; - -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Objects that can both render themselves in as json/yaml/etc and can provide a {@link RestStatus} for their response. Usually should be - * implemented by top level responses sent back to users from REST endpoints. - */ -public interface StatusToXContentObject extends ToXContentObject { - - /** - * Returns the REST status to make sure it is returned correctly - */ - RestStatus status(); -} diff --git a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java index 5e60b2b6c87ba..b571c3f1f005a 100644 --- a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java +++ b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java @@ -14,13 +14,14 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Nullable; import org.elasticsearch.health.stats.HealthApiStats; @@ -45,7 +46,7 @@ public class GetHealthAction extends ActionType { public static 
final String NAME = "cluster:monitor/health_api"; private GetHealthAction() { - super(NAME, GetHealthAction.Response::new); + super(NAME, Writeable.Reader.localOnly()); } public static class Response extends ActionResponse implements ChunkedToXContent { @@ -55,10 +56,6 @@ public static class Response extends ActionResponse implements ChunkedToXContent private final HealthStatus status; private final List indicators; - public Response(StreamInput in) { - throw new AssertionError("GetHealthAction should not be sent over the wire."); - } - public Response(final ClusterName clusterName, final List indicators, boolean showTopLevelStatus) { this.indicators = indicators; this.clusterName = clusterName; @@ -90,7 +87,7 @@ public List getIndicatorResults() { @Override public void writeTo(StreamOutput out) throws IOException { - throw new AssertionError("GetHealthAction should not be sent over the wire."); + TransportAction.localOnly(); } @Override @@ -173,9 +170,14 @@ public ActionRequestValidationException validate() { public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { return new CancellableTask(id, type, action, "", parentTaskId, headers); } + + @Override + public void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } } - public static class TransportAction extends org.elasticsearch.action.support.TransportAction { + public static class LocalAction extends TransportAction { private final ClusterService clusterService; private final HealthService healthService; @@ -183,7 +185,7 @@ public static class TransportAction extends org.elasticsearch.action.support.Tra private final HealthApiStats healthApiStats; @Inject - public TransportAction( + public LocalAction( ActionFilters actionFilters, TransportService transportService, ClusterService clusterService, diff --git a/server/src/main/java/org/elasticsearch/index/Index.java b/server/src/main/java/org/elasticsearch/index/Index.java index 
85468326954d6..e11fd394d60a9 100644 --- a/server/src/main/java/org/elasticsearch/index/Index.java +++ b/server/src/main/java/org/elasticsearch/index/Index.java @@ -103,9 +103,14 @@ public void writeTo(final StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); + toXContentFragment(builder); + return builder.endObject(); + } + + public XContentBuilder toXContentFragment(final XContentBuilder builder) throws IOException { builder.field(INDEX_NAME_KEY, name); builder.field(INDEX_UUID_KEY, uuid); - return builder.endObject(); + return builder; } public static Index fromXContent(final XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 37fce6411af4f..e9651a7f63867 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -878,6 +878,18 @@ protected GetResult realtimeGetUnderLock( } boolean getFromSearcherIfNotInTranslog = getFromSearcher; if (versionValue != null) { + /* + * Once we've seen the ID in the live version map, in two cases it is still possible not to + * be able to follow up with serving the get from the translog: + * 1. It is possible that once we attempt handling the get, we won't see the doc in the translog + * since it might have been moved out. + * TODO: ideally we should keep around translog entries long enough to cover this case + * 2. We might not be tracking translog locations in the live version map (see {@link trackTranslogLocation}) + * + * In these cases, we should always fall back to get the doc from the internal searcher. 
+ */ + + getFromSearcherIfNotInTranslog = true; if (versionValue.isDelete()) { return GetResult.NOT_EXISTS; } @@ -911,11 +923,8 @@ protected GetResult realtimeGetUnderLock( throw new EngineException(shardId, "failed to read operation from translog", e); } } else { + // We need to start tracking translog locations in the live version map. trackTranslogLocation.set(true); - // We need to start tracking translog locations in the live version map. Refresh and - // serve all the real-time gets with a missing translog location from the internal searcher - // (until a flush happens) even if we're supposed to only get from translog. - getFromSearcherIfNotInTranslog = true; } } assert versionValue.seqNo >= 0 : versionValue; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/HistogramValue.java b/server/src/main/java/org/elasticsearch/index/fielddata/HistogramValue.java index 902246b442e3b..3ee868ed85ee2 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/HistogramValue.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/HistogramValue.java @@ -32,6 +32,6 @@ public abstract class HistogramValue { * The current count of the histogram * @return the current count of the histogram */ - public abstract int count(); + public abstract long count(); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index a9d90f80c8a18..350ac22c5e216 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -36,7 +36,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.HashMap; @@ -1108,7 +1107,7 @@ public static Parameter> metaParam() { return new Parameter<>( "meta", true, - Collections::emptyMap, + Map::of, (n, c, 
o) -> TypeParsers.parseMeta(n, o), m -> m.fieldType().meta(), XContentBuilder::stringStringMap, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index d7fa0dae21b38..21ed56a82292c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -79,7 +79,9 @@ public MappedFieldType( this.isStored = isStored; this.docValues = hasDocValues; this.textSearchInfo = Objects.requireNonNull(textSearchInfo); - this.meta = Objects.requireNonNull(meta); + // meta should be sorted but for the one item or empty case we can fall back to immutable maps to save some memory since order is + // irrelevant + this.meta = meta.size() <= 1 ? Map.copyOf(meta) : meta; } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index c42c4df01c5fa..40c96b9976317 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -38,6 +38,9 @@ public static Map parseMeta(String name, Object metaObject) { } @SuppressWarnings("unchecked") Map meta = (Map) metaObject; + if (meta.isEmpty()) { + return Map.of(); + } if (meta.size() > 5) { throw new MapperParsingException("[meta] can't have more than 5 entries, but got " + meta.size() + " on field [" + name + "]"); } @@ -69,6 +72,12 @@ public static Map parseMeta(String name, Object metaObject) { ); } } + var entrySet = meta.entrySet(); + if (entrySet.size() == 1) { + // no need to sort for a single entry + var entry = entrySet.iterator().next(); + return Map.of(entry.getKey(), (String) entry.getValue()); + } Map sortedMeta = new TreeMap<>(); for (Map.Entry entry : meta.entrySet()) { sortedMeta.put(entry.getKey(), (String) entry.getValue()); diff 
--git a/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java b/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java index 566df5dc03665..daf9a809dcf07 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot; import org.elasticsearch.index.snapshots.blobstore.SlicedInputStream; @@ -49,7 +50,7 @@ public InputStream getInputStreamForSnapshotFile( inputStream = new SlicedInputStream(fileInfo.numberOfParts()) { @Override protected InputStream openSlice(int slice) throws IOException { - return container.readBlob(fileInfo.partName(slice)); + return container.readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(slice)); } }; } diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 3bc9cf2a5a116..db38ca1e037a1 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -17,8 +17,9 @@ public interface InferenceService { String name(); /** - * Parse model configuration from the {@code config map} and return - * the parsed {@link Model}. + * Parse model configuration from the {@code config map} from a request and return + * the parsed {@link Model}. This requires that both the secrets and service settings be contained in the + * {@code service_settings} field. * This function modifies {@code config map}, fields are removed * from the map as they are read. 
* @@ -27,21 +28,25 @@ public interface InferenceService { * * @param modelId Model Id * @param taskType The model task type - * @param config Configuration options + * @param config Configuration options including the secrets * @return The parsed {@link Model} */ - Model parseConfigStrict(String modelId, TaskType taskType, Map config); + Model parseRequestConfig(String modelId, TaskType taskType, Map config); /** - * As {@link #parseConfigStrict(String, TaskType, Map)} but the function - * does not throw on unrecognized options. + * Parse model configuration from {@code config map} from persisted storage and return the parsed {@link Model}. This requires that + * secrets and service settings be in two separate maps. + * This function modifies {@code config map}, fields are removed from the map as they are read. + * + * If the map contains unrecognized configuration options, no error is thrown. * * @param modelId Model Id * @param taskType The model task type * @param config Configuration options + * @param secrets Sensitive configuration options (e.g. api key) * @return The parsed {@link Model} */ - Model parseConfigLenient(String modelId, TaskType taskType, Map config); + Model parsePersistedConfig(String modelId, TaskType taskType, Map config, Map secrets); /** * Perform inference on the model. 
diff --git a/server/src/main/java/org/elasticsearch/inference/Model.java b/server/src/main/java/org/elasticsearch/inference/Model.java index 67ee58bad733c..eedb67a8111e5 100644 --- a/server/src/main/java/org/elasticsearch/inference/Model.java +++ b/server/src/main/java/org/elasticsearch/inference/Model.java @@ -8,101 +8,72 @@ package org.elasticsearch.inference; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.VersionedNamedWriteable; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; import java.util.Objects; -public class Model implements ToXContentObject, VersionedNamedWriteable { - - public static final String MODEL_ID = "model_id"; - public static final String SERVICE = "service"; - public static final String SERVICE_SETTINGS = "service_settings"; - public static final String TASK_SETTINGS = "task_settings"; - - private static final String NAME = "inference_model"; - +public class Model { public static String documentId(String modelId) { return "model_" + modelId; } - private final String modelId; - private final TaskType taskType; - private final String service; - private final ServiceSettings serviceSettings; - private final TaskSettings taskSettings; - - public Model(String modelId, TaskType taskType, String service, ServiceSettings serviceSettings, TaskSettings taskSettings) { - this.modelId = modelId; - this.taskType = taskType; - this.service = service; - this.serviceSettings = serviceSettings; - this.taskSettings = taskSettings; - } - - public Model(StreamInput in) throws IOException { - this.modelId = in.readString(); - this.taskType = in.readEnum(TaskType.class); - this.service = in.readString(); - this.serviceSettings = in.readNamedWriteable(ServiceSettings.class); - 
this.taskSettings = in.readNamedWriteable(TaskSettings.class); - } + private final ModelConfigurations configurations; + private final ModelSecrets secrets; - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(modelId); - out.writeEnum(taskType); - out.writeString(service); - out.writeNamedWriteable(serviceSettings); - out.writeNamedWriteable(taskSettings); + public Model(ModelConfigurations configurations, ModelSecrets secrets) { + this.configurations = Objects.requireNonNull(configurations); + this.secrets = Objects.requireNonNull(secrets); } - public String getModelId() { - return modelId; + public Model(ModelConfigurations configurations) { + this(configurations, new ModelSecrets()); } - public TaskType getTaskType() { - return taskType; + /** + * Returns the model's non-sensitive configurations (e.g. service name). + */ + public ModelConfigurations getConfigurations() { + return configurations; } - public String getService() { - return service; + /** + * Returns the model's sensitive configurations (e.g. api key). + * + * This returns an object that in json would look like: + * + *

+     * {@code
+     * {
+     *     "secret_settings": { "api_key": "abc" }
+     * }
+     * }
+     * 
+ */ + public ModelSecrets getSecrets() { + return secrets; } public ServiceSettings getServiceSettings() { - return serviceSettings; + return configurations.getServiceSettings(); } public TaskSettings getTaskSettings() { - return taskSettings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MODEL_ID, modelId); - builder.field(TaskType.NAME, taskType.toString()); - builder.field(SERVICE, service); - builder.field(SERVICE_SETTINGS, serviceSettings); - builder.field(TASK_SETTINGS, taskSettings); - builder.endObject(); - return builder; + return configurations.getTaskSettings(); } - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_074; + /** + * Returns the inner sensitive data defined by a particular service. + * + * This returns an object that in json would look like: + * + *
+     * {@code
+     * {
+     *     "api_key": "abc"
+     * }
+     * }
+     * 
+ */ + public SecretSettings getSecretSettings() { + return secrets.getSecretSettings(); } @Override @@ -110,15 +81,11 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Model model = (Model) o; - return Objects.equals(modelId, model.modelId) - && taskType == model.taskType - && Objects.equals(service, model.service) - && Objects.equals(serviceSettings, model.serviceSettings) - && Objects.equals(taskSettings, model.taskSettings); + return Objects.equals(configurations, model.configurations) && Objects.equals(secrets, model.secrets); } @Override public int hashCode() { - return Objects.hash(modelId, taskType, service, serviceSettings, taskSettings); + return Objects.hash(configurations, secrets); } } diff --git a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java new file mode 100644 index 0000000000000..a8ae380bd3ba1 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java @@ -0,0 +1,125 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.inference; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class ModelConfigurations implements ToXContentObject, VersionedNamedWriteable { + + public static final String MODEL_ID = "model_id"; + public static final String SERVICE = "service"; + public static final String SERVICE_SETTINGS = "service_settings"; + public static final String TASK_SETTINGS = "task_settings"; + private static final String NAME = "inference_model"; + + private final String modelId; + private final TaskType taskType; + private final String service; + private final ServiceSettings serviceSettings; + private final TaskSettings taskSettings; + + public ModelConfigurations( + String modelId, + TaskType taskType, + String service, + ServiceSettings serviceSettings, + TaskSettings taskSettings + ) { + this.modelId = modelId; + this.taskType = taskType; + this.service = service; + this.serviceSettings = serviceSettings; + this.taskSettings = taskSettings; + } + + public ModelConfigurations(StreamInput in) throws IOException { + this.modelId = in.readString(); + this.taskType = in.readEnum(TaskType.class); + this.service = in.readString(); + this.serviceSettings = in.readNamedWriteable(ServiceSettings.class); + this.taskSettings = in.readNamedWriteable(TaskSettings.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(modelId); + out.writeEnum(taskType); + out.writeString(service); + out.writeNamedWriteable(serviceSettings); + out.writeNamedWriteable(taskSettings); + } + + public String getModelId() { + return modelId; + } 
+ + public TaskType getTaskType() { + return taskType; + } + + public String getService() { + return service; + } + + public ServiceSettings getServiceSettings() { + return serviceSettings; + } + + public TaskSettings getTaskSettings() { + return taskSettings; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(MODEL_ID, modelId); + builder.field(TaskType.NAME, taskType.toString()); + builder.field(SERVICE, service); + builder.field(SERVICE_SETTINGS, serviceSettings); + builder.field(TASK_SETTINGS, taskSettings); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.V_8_500_074; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelConfigurations model = (ModelConfigurations) o; + return Objects.equals(modelId, model.modelId) + && taskType == model.taskType + && Objects.equals(service, model.service) + && Objects.equals(serviceSettings, model.serviceSettings) + && Objects.equals(taskSettings, model.taskSettings); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, taskType, service, serviceSettings, taskSettings); + } +} diff --git a/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java b/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java new file mode 100644 index 0000000000000..78199ae3029ba --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.inference; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Represents the portion of a model that contains sensitive data + */ +public class ModelSecrets implements ToXContentObject, VersionedNamedWriteable { + public static final String SECRET_SETTINGS = "secret_settings"; + private static final String NAME = "inference_model_secrets"; + private final SecretSettings secretSettings; + + public ModelSecrets() { + this.secretSettings = null; + } + + public ModelSecrets(@Nullable SecretSettings secretSettings) { + // allow the secrets to be null in cases where the service does not have any secrets + this.secretSettings = secretSettings; + } + + public ModelSecrets(StreamInput in) throws IOException { + this(in.readOptionalNamedWriteable(SecretSettings.class)); + } + + public SecretSettings getSecretSettings() { + return secretSettings; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalNamedWriteable(secretSettings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (secretSettings != null) { + builder.field(SECRET_SETTINGS, secretSettings); + } + + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() 
{ + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.INFERENCE_MODEL_SECRETS_ADDED; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelSecrets that = (ModelSecrets) o; + return Objects.equals(secretSettings, that.secretSettings); + } + + @Override + public int hashCode() { + return Objects.hash(secretSettings); + } +} diff --git a/server/src/main/java/org/elasticsearch/inference/SecretSettings.java b/server/src/main/java/org/elasticsearch/inference/SecretSettings.java new file mode 100644 index 0000000000000..581f5dd442ea4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/SecretSettings.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.inference; + +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.xcontent.ToXContentObject; + +public interface SecretSettings extends ToXContentObject, VersionedNamedWriteable { + +} diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index df657abd55152..f70676aa2a2fc 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -1150,6 +1150,23 @@ public int hashCode() { return Objects.hash(snapshotState, version, startTimeMillis, endTimeMillis, slmPolicy); } + public static SnapshotDetails fromSnapshotInfo(SnapshotInfo snapshotInfo) { + return new SnapshotDetails( + snapshotInfo.state(), + snapshotInfo.version(), + snapshotInfo.startTime(), + snapshotInfo.endTime(), + slmPolicy(snapshotInfo.userMetadata()) + ); + } + + private static String slmPolicy(Map userMetadata) { + if (userMetadata != null && userMetadata.get(SnapshotsService.POLICY_ID_METADATA_FIELD) instanceof String policyId) { + return policyId; + } else { + return ""; + } + } } } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 7e3a2b531d366..97b0448107330 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -51,6 +51,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.fs.FsBlobContainer; import 
org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; @@ -512,7 +513,7 @@ public void cloneShardSnapshot( final ShardGeneration existingShardGen; if (shardGeneration == null) { Tuple tuple = buildBlobStoreIndexShardSnapshots( - shardContainer.listBlobsByPrefix(INDEX_FILE_PREFIX).keySet(), + shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet(), shardContainer ); existingShardGen = new ShardGeneration(tuple.v2()); @@ -786,11 +787,12 @@ public void deleteSnapshots( threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(new AbstractRunnable() { @Override protected void doRun() throws Exception { - final Map rootBlobs = blobContainer().listBlobs(); + final Map rootBlobs = blobContainer().listBlobs(OperationPurpose.SNAPSHOT); final RepositoryData repositoryData = safeRepositoryData(repositoryStateId, rootBlobs); // Cache the indices that were found before writing out the new index-N blob so that a stuck master will never // delete an index that was created by another master node after writing this index-N blob. 
- final Map foundIndices = blobStore().blobContainer(indicesPath()).children(); + final Map foundIndices = blobStore().blobContainer(indicesPath()) + .children(OperationPurpose.SNAPSHOT); doDeleteShardSnapshots( snapshotIds, repositoryStateId, @@ -1045,7 +1047,7 @@ private void writeUpdatedShardMetaDataAndComputeDeletes( @Override protected void doRun() throws Exception { final BlobContainer shardContainer = shardContainer(indexId, finalShardId); - final Set blobs = shardContainer.listBlobs().keySet(); + final Set blobs = shardContainer.listBlobs(OperationPurpose.SNAPSHOT).keySet(); final BlobStoreIndexShardSnapshots blobStoreIndexShardSnapshots; final long newGen; if (useUUIDs) { @@ -1181,9 +1183,9 @@ public void cleanup(long repositoryStateId, IndexVersion repositoryMetaVersion, if (isReadOnly()) { throw new RepositoryException(metadata.name(), "cannot run cleanup on readonly repository"); } - Map rootBlobs = blobContainer().listBlobs(); + Map rootBlobs = blobContainer().listBlobs(OperationPurpose.SNAPSHOT); final RepositoryData repositoryData = safeRepositoryData(repositoryStateId, rootBlobs); - final Map foundIndices = blobStore().blobContainer(indicesPath()).children(); + final Map foundIndices = blobStore().blobContainer(indicesPath()).children(OperationPurpose.SNAPSHOT); final Set survivingIndexIds = repositoryData.getIndices() .values() .stream() @@ -1297,7 +1299,7 @@ private DeleteResult cleanupStaleIndices(Map foundIndices try { if (survivingIndexIds.contains(indexSnId) == false) { logger.debug("[{}] Found stale index [{}]. 
Cleaning it up", metadata.name(), indexSnId); - deleteResult = deleteResult.add(indexEntry.getValue().delete()); + deleteResult = deleteResult.add(indexEntry.getValue().delete(OperationPurpose.SNAPSHOT)); logger.debug("[{}] Cleaned up stale index [{}]", metadata.name(), indexSnId); } } catch (Exception e) { @@ -1430,14 +1432,7 @@ record RootBlobUpdateResult(RepositoryData oldRepositoryData, RepositoryData new // Update the root blob .andThen((l, metadataWriteResult) -> { // unlikely, but in theory we could still be on the thread which called finalizeSnapshot - TODO must fork to SNAPSHOT here - final String slmPolicy = slmPolicy(snapshotInfo); - final SnapshotDetails snapshotDetails = new SnapshotDetails( - snapshotInfo.state(), - IndexVersion.current(), - snapshotInfo.startTime(), - snapshotInfo.endTime(), - slmPolicy - ); + final var snapshotDetails = SnapshotDetails.fromSnapshotInfo(snapshotInfo); final var existingRepositoryData = metadataWriteResult.existingRepositoryData(); writeIndexGen( existingRepositoryData.addSnapshot( @@ -1635,7 +1630,7 @@ public String next() { } else { wrappedIterator = blobs; } - container.deleteBlobsIgnoringIfNotExists(wrappedIterator); + container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, wrappedIterator); } private BlobPath indicesPath() { @@ -1755,7 +1750,7 @@ public String startVerification() { String seed = UUIDs.randomBase64UUID(); byte[] testBytes = Strings.toUTF8Bytes(seed); BlobContainer testContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed))); - testContainer.writeBlobAtomic("master.dat", new BytesArray(testBytes), true); + testContainer.writeBlobAtomic(OperationPurpose.SNAPSHOT, "master.dat", new BytesArray(testBytes), true); return seed; } } catch (Exception exp) { @@ -1768,7 +1763,7 @@ public void endVerification(String seed) { if (isReadOnly() == false) { try { final String testPrefix = testBlobPrefix(seed); - blobStore().blobContainer(basePath().add(testPrefix)).delete(); + 
blobStore().blobContainer(basePath().add(testPrefix)).delete(OperationPurpose.SNAPSHOT); } catch (Exception exp) { throw new RepositoryVerificationException(metadata.name(), "cannot delete test data at " + basePath(), exp); } @@ -2187,7 +2182,7 @@ private RepositoryData getRepositoryData(long indexGen) { // EMPTY is safe here because RepositoryData#fromXContent calls namedObject try ( - InputStream blob = blobContainer().readBlob(snapshotsIndexBlobName); + InputStream blob = blobContainer().readBlob(OperationPurpose.SNAPSHOT, snapshotsIndexBlobName); XContentParser parser = XContentType.JSON.xContent() .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, blob) ) { @@ -2364,17 +2359,7 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) final Map extraDetailsMap = new ConcurrentHashMap<>(); getSnapshotInfo( new GetSnapshotInfoContext(snapshotIdsWithMissingDetails, false, () -> false, (context, snapshotInfo) -> { - final String slmPolicy = slmPolicy(snapshotInfo); - extraDetailsMap.put( - snapshotInfo.snapshotId(), - new SnapshotDetails( - snapshotInfo.state(), - snapshotInfo.version(), - snapshotInfo.startTime(), - snapshotInfo.endTime(), - slmPolicy - ) - ); + extraDetailsMap.put(snapshotInfo.snapshotId(), SnapshotDetails.fromSnapshotInfo(snapshotInfo)); }, ActionListener.runAfter(new ActionListener<>() { @Override public void onResponse(Void aVoid) { @@ -2479,24 +2464,6 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) })); } - /** - * Extract slm policy from snapshot info. If none can be found, empty string is returned. 
- */ - private static String slmPolicy(SnapshotInfo snapshotInfo) { - final String slmPolicy; - if (snapshotInfo.userMetadata() == null) { - slmPolicy = ""; - } else { - final Object policyFound = snapshotInfo.userMetadata().get(SnapshotsService.POLICY_ID_METADATA_FIELD); - if (policyFound instanceof String) { - slmPolicy = (String) policyFound; - } else { - slmPolicy = ""; - } - } - return slmPolicy; - } - private RepositoryData updateRepositoryData(RepositoryData repositoryData, IndexVersion repositoryMetaversion, long newGen) { if (SnapshotsService.includesUUIDs(repositoryMetaversion)) { final String clusterUUID = clusterService.state().metadata().clusterUUID(); @@ -2542,7 +2509,8 @@ private void maybeWriteIndexLatest(long newGen) { */ private boolean ensureSafeGenerationExists(long safeGeneration, Consumer onFailure) throws IOException { logger.debug("Ensure generation [{}] that is the basis for this write exists in [{}]", safeGeneration, metadata.name()); - if (safeGeneration != RepositoryData.EMPTY_REPO_GEN && blobContainer().blobExists(INDEX_FILE_PREFIX + safeGeneration) == false) { + if (safeGeneration != RepositoryData.EMPTY_REPO_GEN + && blobContainer().blobExists(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX + safeGeneration) == false) { Tuple previousWriterInfo = null; Exception readRepoDataEx = null; try { @@ -2671,7 +2639,9 @@ long latestIndexBlobId() throws IOException { // package private for testing long readSnapshotIndexLatestBlob() throws IOException { - final BytesReference content = Streams.readFully(Streams.limitStream(blobContainer().readBlob(INDEX_LATEST_BLOB), Long.BYTES + 1)); + final BytesReference content = Streams.readFully( + Streams.limitStream(blobContainer().readBlob(OperationPurpose.SNAPSHOT, INDEX_LATEST_BLOB), Long.BYTES + 1) + ); if (content.length() != Long.BYTES) { throw new RepositoryException( metadata.name(), @@ -2685,7 +2655,7 @@ long readSnapshotIndexLatestBlob() throws IOException { } private long 
listBlobsToGetLatestIndexId() throws IOException { - return latestGeneration(blobContainer().listBlobsByPrefix(INDEX_FILE_PREFIX).keySet()); + return latestGeneration(blobContainer().listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet()); } private long latestGeneration(Collection rootBlobs) { @@ -2713,7 +2683,7 @@ private void writeAtomic( boolean failIfAlreadyExists ) throws IOException { logger.trace(() -> format("[%s] Writing [%s] to %s atomically", metadata.name(), blobName, container.path())); - container.writeMetadataBlob(blobName, failIfAlreadyExists, true, writer); + container.writeMetadataBlob(OperationPurpose.SNAPSHOT, blobName, failIfAlreadyExists, true, writer); } @Override @@ -2739,7 +2709,7 @@ private void doSnapshotShard(SnapshotShardContext context) { if (generation == null) { snapshotStatus.ensureNotAborted(); try { - blobs = shardContainer.listBlobsByPrefix(INDEX_FILE_PREFIX).keySet(); + blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet(); } catch (IOException e) { throw new IndexShardSnapshotFailedException(shardId, "failed to list blobs", e); } @@ -3149,7 +3119,7 @@ private void restoreFile(BlobStoreIndexShardSnapshot.FileInfo fileInfo, Store st @Override protected InputStream openSlice(int slice) throws IOException { ensureNotClosing(store); - return container.readBlob(fileInfo.partName(slice)); + return container.readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(slice)); } })) { final byte[] buffer = new byte[Math.toIntExact(Math.min(bufferSize, fileInfo.length()))]; @@ -3288,7 +3258,7 @@ public void verify(String seed, DiscoveryNode localNode) { } else { BlobContainer testBlobContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed))); try { - testBlobContainer.writeBlob("data-" + localNode.getId() + ".dat", new BytesArray(seed), true); + testBlobContainer.writeBlob(OperationPurpose.SNAPSHOT, "data-" + localNode.getId() + ".dat", new BytesArray(seed), true); } 
catch (Exception exp) { throw new RepositoryVerificationException( metadata.name(), @@ -3296,7 +3266,7 @@ public void verify(String seed, DiscoveryNode localNode) { exp ); } - try (InputStream masterDat = testBlobContainer.readBlob("master.dat")) { + try (InputStream masterDat = testBlobContainer.readBlob(OperationPurpose.SNAPSHOT, "master.dat")) { final String seedRead = Streams.readFully(masterDat).utf8ToString(); if (seedRead.equals(seed) == false) { throw new RepositoryVerificationException( @@ -3450,7 +3420,7 @@ public BlobStoreIndexShardSnapshots getBlobStoreIndexShardSnapshots(IndexId inde Set blobs = Collections.emptySet(); if (shardGen == null) { - blobs = shardContainer.listBlobsByPrefix(INDEX_FILE_PREFIX).keySet(); + blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet(); } return buildBlobStoreIndexShardSnapshots(blobs, shardContainer, shardGen).v1(); @@ -3555,7 +3525,7 @@ private void checkAborted() { final String partName = fileInfo.partName(i); logger.trace("[{}] Writing [{}] to [{}]", metadata.name(), partName, shardContainer.path()); final long startMS = threadPool.relativeTimeInMillis(); - shardContainer.writeBlob(partName, inputStream, partBytes, false); + shardContainer.writeBlob(OperationPurpose.SNAPSHOT, partName, inputStream, partBytes, false); logger.trace( "[{}] Writing [{}] of size [{}b] to [{}] took [{}ms]", metadata.name(), diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java index 5e4f429a9076f..54cb6fe7c45d3 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.Numbers; import 
org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; @@ -117,7 +118,7 @@ public ChecksumBlobStoreFormat( public T read(String repoName, BlobContainer blobContainer, String name, NamedXContentRegistry namedXContentRegistry) throws IOException { String blobName = blobName(name); - try (InputStream in = blobContainer.readBlob(blobName)) { + try (InputStream in = blobContainer.readBlob(OperationPurpose.SNAPSHOT, blobName)) { return deserialize(repoName, namedXContentRegistry, in); } } @@ -343,7 +344,13 @@ public void write(T obj, BlobContainer blobContainer, String name, boolean compr public void write(T obj, BlobContainer blobContainer, String name, boolean compress, Map serializationParams) throws IOException { final String blobName = blobName(name); - blobContainer.writeMetadataBlob(blobName, false, false, out -> serialize(obj, blobName, compress, serializationParams, out)); + blobContainer.writeMetadataBlob( + OperationPurpose.SNAPSHOT, + blobName, + false, + false, + out -> serialize(obj, blobName, compress, serializationParams, out) + ); } public void serialize(final T obj, final String blobName, final boolean compress, final OutputStream outputStream) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestStatusToXContentListener.java b/server/src/main/java/org/elasticsearch/rest/action/RestStatusToXContentListener.java deleted file mode 100644 index 178146c49480b..0000000000000 --- a/server/src/main/java/org/elasticsearch/rest/action/RestStatusToXContentListener.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.rest.action; - -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.util.function.Function; - -/** - * Content listener that extracts that {@link RestStatus} from the response. - */ -public class RestStatusToXContentListener extends RestToXContentListener { - private final Function extractLocation; - - /** - * Build an instance that doesn't support responses with the status {@code 201 CREATED}. - */ - public RestStatusToXContentListener(RestChannel channel) { - this(channel, r -> { - assert false : "Returned a 201 CREATED but not set up to support a Location header"; - return null; - }); - } - - /** - * Build an instance that does support responses with the status {@code 201 CREATED}. 
- */ - public RestStatusToXContentListener(RestChannel channel, Function extractLocation) { - super(channel); - this.extractLocation = extractLocation; - } - - @Override - public RestResponse buildResponse(Response response, XContentBuilder builder) throws Exception { - assert response.isFragment() == false; // would be nice if we could make default methods final - response.toXContent(builder, channel.request()); - RestResponse restResponse = new RestResponse(response.status(), builder); - if (RestStatus.CREATED == restResponse.status()) { - final String location = extractLocation.apply(response); - if (location != null) { - restResponse.addHeader("Location", location); - } - } - return restResponse; - } -} diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestToXContentListener.java b/server/src/main/java/org/elasticsearch/rest/action/RestToXContentListener.java index 4685eb51c436b..cc759985f7d1f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestToXContentListener.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestToXContentListener.java @@ -14,23 +14,50 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import java.util.function.Function; + /** * A REST based action listener that requires the response to implement {@link ToXContentObject} and automatically * builds an XContent based response. 
*/ +// TODO make this final public class RestToXContentListener extends RestBuilderListener { + protected final Function statusFunction; + private final Function locationFunction; + public RestToXContentListener(RestChannel channel) { + this(channel, r -> RestStatus.OK); + } + + public RestToXContentListener(RestChannel channel, Function statusFunction) { + this(channel, statusFunction, r -> { + assert false : "Returned a 201 CREATED but not set up to support a Location header from " + r.getClass(); + return null; + }); + } + + public RestToXContentListener( + RestChannel channel, + Function statusFunction, + Function locationFunction + ) { super(channel); + this.statusFunction = statusFunction; + this.locationFunction = locationFunction; } public RestResponse buildResponse(Response response, XContentBuilder builder) throws Exception { assert response.isFragment() == false; // would be nice if we could make default methods final response.toXContent(builder, channel.request()); - return new RestResponse(getStatus(response), builder); - } - - protected RestStatus getStatus(Response response) { - return RestStatus.OK; + RestStatus restStatus = statusFunction.apply(response); + RestResponse r = new RestResponse(restStatus, builder); + if (RestStatus.CREATED == restStatus) { + final String location = locationFunction.apply(response); + if (location != null) { + r.addHeader("Location", location); + } + } + return r; } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java index 88093ea5c7ea4..6518ccc6e0c94 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ClusterStatsLevel; import 
org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; @@ -21,7 +22,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; import java.util.Collections; @@ -54,7 +55,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final ClusterHealthRequest clusterHealthRequest = fromRequest(request); return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).admin() .cluster() - .health(clusterHealthRequest, new RestStatusToXContentListener<>(channel)); + .health(clusterHealthRequest, new RestToXContentListener<>(channel, ClusterHealthResponse::status)); } public static ClusterHealthRequest fromRequest(final RestRequest request) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java index 2bf2afd7ecb2f..b0d5bce981f2a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java @@ -8,12 +8,13 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.client.internal.node.NodeClient; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; import java.util.List; @@ -38,6 +39,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient String id = request.param("id"); GetStoredScriptRequest getRequest = new GetStoredScriptRequest(id); getRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getRequest.masterNodeTimeout())); - return channel -> client.admin().cluster().getStoredScript(getRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.admin() + .cluster() + .getStoredScript(getRequest, new RestToXContentListener<>(channel, GetStoredScriptResponse::status)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestResetFeatureStateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestResetFeatureStateAction.java index a11cc65d1e92a..dcf6a1d165e7a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestResetFeatureStateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestResetFeatureStateAction.java @@ -45,20 +45,17 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { final ResetFeatureStateRequest req = new ResetFeatureStateRequest(); - return restChannel -> client.execute(ResetFeatureStateAction.INSTANCE, req, new RestToXContentListener<>(restChannel) { - @Override - protected RestStatus getStatus(ResetFeatureStateResponse response) { - long failures = response.getFeatureStateResetStatuses() - .stream() - .filter(status -> status.getStatus() == ResetFeatureStateResponse.ResetFeatureStateStatus.Status.FAILURE) - .count(); 
- if (failures == 0) { - return RestStatus.OK; - } else if (failures == response.getFeatureStateResetStatuses().size()) { - return RestStatus.INTERNAL_SERVER_ERROR; - } - return RestStatus.MULTI_STATUS; + return restChannel -> client.execute(ResetFeatureStateAction.INSTANCE, req, new RestToXContentListener<>(restChannel, r -> { + long failures = r.getFeatureStateResetStatuses() + .stream() + .filter(status -> status.getStatus() == ResetFeatureStateResponse.ResetFeatureStateStatus.Status.FAILURE) + .count(); + if (failures == 0) { + return RestStatus.OK; + } else if (failures == r.getFeatureStateResetStatuses().size()) { + return RestStatus.INTERNAL_SERVER_ERROR; } - }); + return RestStatus.MULTI_STATUS; + })); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java index 56d475ecfab7f..4213f42549cd7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java @@ -9,11 +9,9 @@ package org.elasticsearch.rest.action.admin.cluster.dangling; import org.elasticsearch.action.admin.indices.dangling.delete.DeleteDanglingIndexRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; @@ -44,11 +42,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient deleteRequest.timeout(request.paramAsTime("timeout", deleteRequest.timeout())); deleteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", 
deleteRequest.masterNodeTimeout())); - return channel -> client.admin().cluster().deleteDanglingIndex(deleteRequest, new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(AcknowledgedResponse acknowledgedResponse) { - return ACCEPTED; - } - }); + return channel -> client.admin().cluster().deleteDanglingIndex(deleteRequest, new RestToXContentListener<>(channel, r -> ACCEPTED)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java index 4eebc06d64902..7f481c16118bd 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java @@ -9,11 +9,9 @@ package org.elasticsearch.rest.action.admin.cluster.dangling; import org.elasticsearch.action.admin.indices.dangling.import_index.ImportDanglingIndexRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; @@ -43,11 +41,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient importRequest.timeout(request.paramAsTime("timeout", importRequest.timeout())); importRequest.masterNodeTimeout(request.paramAsTime("master_timeout", importRequest.masterNodeTimeout())); - return channel -> client.admin().cluster().importDanglingIndex(importRequest, new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(AcknowledgedResponse acknowledgedResponse) { - return ACCEPTED; - } - }); + return channel -> 
client.admin().cluster().importDanglingIndex(importRequest, new RestToXContentListener<>(channel, r -> ACCEPTED)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComponentTemplateAction.java index 5827e1519fb42..84e7865d9f699 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComponentTemplateAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; @@ -54,13 +53,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final boolean implicitAll = getRequest.name() == null; - return channel -> client.execute(GetComponentTemplateAction.INSTANCE, getRequest, new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(final GetComponentTemplateAction.Response response) { - final boolean templateExists = response.getComponentTemplates().isEmpty() == false; - return (templateExists || implicitAll) ? OK : NOT_FOUND; - } - }); + return channel -> client.execute(GetComponentTemplateAction.INSTANCE, getRequest, new RestToXContentListener<>(channel, r -> { + final boolean templateExists = r.getComponentTemplates().isEmpty() == false; + return (templateExists || implicitAll) ? 
OK : NOT_FOUND; + })); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComposableIndexTemplateAction.java index 456616568788f..0981d5820131e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComposableIndexTemplateAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; @@ -53,13 +52,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC getRequest.includeDefaults(request.paramAsBoolean("include_defaults", false)); final boolean implicitAll = getRequest.name() == null; - return channel -> client.execute(GetComposableIndexTemplateAction.INSTANCE, getRequest, new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(final GetComposableIndexTemplateAction.Response response) { - final boolean templateExists = response.indexTemplates().isEmpty() == false; - return (templateExists || implicitAll) ? OK : NOT_FOUND; - } - }); + return channel -> client.execute(GetComposableIndexTemplateAction.INSTANCE, getRequest, new RestToXContentListener<>(channel, r -> { + final boolean templateExists = r.indexTemplates().isEmpty() == false; + return (templateExists || implicitAll) ? 
OK : NOT_FOUND; + })); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java index 363702c88e641..2efcfe3bef119 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; -import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; @@ -17,7 +16,6 @@ import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; @@ -65,13 +63,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final boolean implicitAll = getIndexTemplatesRequest.names().length == 0; - return channel -> client.admin().indices().getTemplates(getIndexTemplatesRequest, new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(final GetIndexTemplatesResponse response) { - final boolean templateExists = response.getIndexTemplates().isEmpty() == false; - return (templateExists || implicitAll) ? OK : NOT_FOUND; - } - }); + return channel -> client.admin().indices().getTemplates(getIndexTemplatesRequest, new RestToXContentListener<>(channel, r -> { + final boolean templateExists = r.getIndexTemplates().isEmpty() == false; + return (templateExists || implicitAll) ? 
OK : NOT_FOUND; + })); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java index 59040f45344d7..0882cd076dfc8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java @@ -9,13 +9,12 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; @@ -48,11 +47,8 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { RefreshRequest refreshRequest = new RefreshRequest(Strings.splitStringByCommaToArray(request.param("index"))); refreshRequest.indicesOptions(IndicesOptions.fromRequest(request, refreshRequest.indicesOptions())); - return channel -> client.admin().indices().refresh(refreshRequest, new RestToXContentListener(channel) { - @Override - protected RestStatus getStatus(RefreshResponse response) { - return response.getStatus(); - } - }); + return channel -> client.admin() + .indices() + .refresh(refreshRequest, new RestToXContentListener<>(channel, BaseBroadcastResponse::getStatus)); } } diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java index 5d5ad7559cbef..52ffd42516531 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest.action.document; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.RestApiVersion; @@ -18,7 +19,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestActions; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; import java.util.List; @@ -62,6 +63,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC deleteRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards)); } - return channel -> client.delete(deleteRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.delete(deleteRequest, new RestToXContentListener<>(channel, DeleteResponse::status)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index 21d0258c1e988..13424cfc82780 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -9,14 +9,12 @@ package org.elasticsearch.rest.action.document; import org.elasticsearch.action.get.GetRequest; -import 
org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestActions; @@ -85,12 +83,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC getRequest.setForceSyntheticSource(true); } - return channel -> client.get(getRequest, new RestToXContentListener(channel) { - @Override - protected RestStatus getStatus(final GetResponse response) { - return response.isExists() ? OK : NOT_FOUND; - } - }); + return channel -> client.get(getRequest, new RestToXContentListener<>(channel, r -> r.isExists() ? OK : NOT_FOUND)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 9a943fc1e6cf5..e5c70fa4fe188 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; @@ -21,7 +22,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestActions; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import 
java.io.IOException; import java.util.List; @@ -146,7 +147,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> client.index( indexRequest, - new RestStatusToXContentListener<>(channel, r -> r.getLocation(indexRequest.routing())) + new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())) ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index 3428c9a54a9ae..aff71cba2d858 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; @@ -21,7 +22,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestActions; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -98,7 +99,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> client.update( updateRequest, - new RestStatusToXContentListener<>(channel, r -> r.getLocation(updateRequest.routing())) + new RestToXContentListener<>(channel, UpdateResponse::status, r -> r.getLocation(updateRequest.routing())) ); } diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index b095dde11943c..e87a78c6b658e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -9,13 +9,14 @@ package org.elasticsearch.rest.action.ingest; import org.elasticsearch.action.ingest.GetPipelineRequest; +import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; import java.util.List; @@ -42,6 +43,6 @@ public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient cl Strings.splitStringByCommaToArray(restRequest.param("id")) ); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); - return channel -> client.admin().cluster().getPipeline(request, new RestStatusToXContentListener<>(channel)); + return channel -> client.admin().cluster().getPipeline(request, new RestToXContentListener<>(channel, GetPipelineResponse::status)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java index 179e545ff62ed..cadae92277d34 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java @@ -9,13 +9,14 @@ package 
org.elasticsearch.rest.action.search; import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.XContentParseException; import java.io.IOException; @@ -53,7 +54,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } })); - return channel -> client.clearScroll(clearRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.clearScroll(clearRequest, new RestToXContentListener<>(channel, ClearScrollResponse::status)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index 73d09ddaad489..037ca41a8e196 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest.action.search; import org.elasticsearch.action.explain.ExplainRequest; +import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.core.RestApiVersion; @@ -18,7 +19,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestActions; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import 
org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -84,6 +85,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC explainRequest.fetchSourceContext(FetchSourceContext.parseFromRestRequest(request)); - return channel -> client.explain(explainRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.explain(explainRequest, new RestToXContentListener<>(channel, ExplainResponse::status)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/synonyms/RestPutSynonymRuleAction.java b/server/src/main/java/org/elasticsearch/rest/action/synonyms/RestPutSynonymRuleAction.java index b03165cea6a50..790e62729f110 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/synonyms/RestPutSynonymRuleAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/synonyms/RestPutSynonymRuleAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; @@ -44,11 +43,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient restRequest.content(), restRequest.getXContentType() ); - return channel -> client.execute(PutSynonymRuleAction.INSTANCE, request, new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(SynonymUpdateResponse response) { - return response.status(); - } - }); + return channel -> client.execute( + PutSynonymRuleAction.INSTANCE, + request, + new RestToXContentListener<>(channel, SynonymUpdateResponse::status, r -> null) + ); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/synonyms/RestPutSynonymsAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/synonyms/RestPutSynonymsAction.java index 2b59a2603bf83..e16db62a9ff96 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/synonyms/RestPutSynonymsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/synonyms/RestPutSynonymsAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; @@ -43,11 +42,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient restRequest.content(), restRequest.getXContentType() ); - return channel -> client.execute(PutSynonymsAction.INSTANCE, request, new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(SynonymUpdateResponse response) { - return response.status(); - } - }); + return channel -> client.execute( + PutSynonymsAction.INSTANCE, + request, + new RestToXContentListener<>(channel, SynonymUpdateResponse::status, r -> null) + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionContext.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionContext.java index 88a78a512d1bd..273df99f6479c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionContext.java @@ -53,6 +53,12 @@ public long getTimestamp() { } public int getTsidOrd() { + if (tsidOrdProvider == null) { + throw new IllegalArgumentException( + "Aggregation on a time-series field is misconfigured, likely due to lack of wrapping " + + "a metric aggregation within a `time-series` aggregation" + ); + } return 
tsidOrdProvider.getAsInt(); } } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java index b0da930cd17b6..a72ef9e83ccf2 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java @@ -12,9 +12,9 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.client.internal.Requests; @@ -91,9 +91,9 @@ public void storeResult(TaskResult taskResult, ActionListener listener) { } private void doStoreResult(Iterator backoff, IndexRequestBuilder index, ActionListener listener) { - index.execute(new ActionListener() { + index.execute(new ActionListener() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { listener.onResponse(null); } diff --git a/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java b/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java index 0df8aeedac7f8..add994787227f 100644 --- a/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java +++ b/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java @@ -8,11 +8,15 @@ package org.elasticsearch.telemetry; +import org.elasticsearch.telemetry.metric.Meter; import org.elasticsearch.telemetry.tracing.Tracer; public interface TelemetryProvider { + Tracer getTracer(); + Meter getMeter(); + TelemetryProvider NOOP = new TelemetryProvider() { @Override @@ -20,5 +24,9 
@@ public Tracer getTracer() { return Tracer.NOOP; } + @Override + public Meter getMeter() { + return Meter.NOOP; + } }; } diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleCounter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleCounter.java new file mode 100644 index 0000000000000..c98701bb0a1bb --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleCounter.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * A monotonically increasing metric that uses a double. + * Useful for capturing the number of bytes received, number of requests, etc. + */ +public interface DoubleCounter extends Instrument { + /** + * Add one to the current counter. + */ + void increment(); + + /** + * Increment the counter. + * @param inc amount to increment, non-negative + */ + void incrementBy(double inc); + + /** + * Increment the counter. + * @param inc amount to increment, non-negative + * @param attributes key-value pairs to associate with this increment + */ + void incrementBy(double inc, Map attributes); + + /** + * Noop counter for use in tests. 
+ */ + DoubleCounter NOOP = new DoubleCounter() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void increment() { + + } + + @Override + public void incrementBy(double inc) { + + } + + @Override + public void incrementBy(double inc, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java new file mode 100644 index 0000000000000..797c125900bb8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * Record non-additive double values. 
eg number of running threads, current load + */ +public interface DoubleGauge extends Instrument { + /** + * Record the current value for measured item + */ + void record(double value); + + /** + * Record the current value + * @param attributes key-value pairs to associate with the current measurement + */ + void record(double value, Map attributes); + + /** + * Noop gauge for tests + */ + DoubleGauge NOOP = new DoubleGauge() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void record(double value) { + + } + + @Override + public void record(double value, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleHistogram.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleHistogram.java new file mode 100644 index 0000000000000..11958ea36cd3d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleHistogram.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * Record arbitrary values that are summarized statistically, useful for percentiles and histograms. 
+ */ +public interface DoubleHistogram extends Instrument { + /** + * Record a sample for the measured item + * @param value + */ + void record(double value); + + /** + * Record a sample for the measured item + * @param attributes key-value pairs to associate with the current sample + */ + void record(double value, Map attributes); + + /** + * Noop histogram for tests + */ + DoubleHistogram NOOP = new DoubleHistogram() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void record(double value) { + + } + + @Override + public void record(double value, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleUpDownCounter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleUpDownCounter.java new file mode 100644 index 0000000000000..7d484ebf07d32 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleUpDownCounter.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * A counter that supports decreasing and increasing values. + * Useful for capturing the number of requests in a queue. + */ +public interface DoubleUpDownCounter extends Instrument { + /** + * Add to the counter + * @param inc may be negative. + */ + void add(double inc); + + /** + * Add to the counter + * @param inc may be negative. 
+ * @param attributes key-value pairs to associate with this increment + */ + void add(double inc, Map attributes); + + /** + * Noop counter for use in tests + */ + DoubleUpDownCounter NOOP = new DoubleUpDownCounter() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void add(double inc) { + + } + + @Override + public void add(double inc, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/Instrument.java b/server/src/main/java/org/elasticsearch/telemetry/metric/Instrument.java new file mode 100644 index 0000000000000..19a7e259120f2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/Instrument.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +public interface Instrument { + String getName(); +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongCounter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongCounter.java new file mode 100644 index 0000000000000..f8f2150163835 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongCounter.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * A monotonically increasing metric that uses a long. Useful for integral values such as the number of bytes received, + * number of requests, etc. + */ +public interface LongCounter extends Instrument { + /** + * Add one to the current counter + */ + void increment(); + + /** + * Increment the counter + * @param inc amount to increment + */ + void incrementBy(long inc); + + /** + * Increment the counter. + * @param inc amount to increment + * @param attributes key-value pairs to associate with this increment + */ + void incrementBy(long inc, Map attributes); + + /** + * Noop counter for use in tests. + */ + LongCounter NOOP = new LongCounter() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void increment() { + + } + + @Override + public void incrementBy(long inc) { + + } + + @Override + public void incrementBy(long inc, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java new file mode 100644 index 0000000000000..71539064ce53e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * Record non-additive long values. + */ +public interface LongGauge extends Instrument { + + /** + * Record the current value of the measured item. 
+ * @param value + */ + void record(long value); + + /** + * Record the current value + * @param attributes key-value pairs to associate with the current measurement + */ + void record(long value, Map attributes); + + /** + * Noop gauge for tests + */ + LongGauge NOOP = new LongGauge() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void record(long value) { + + } + + @Override + public void record(long value, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongHistogram.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongHistogram.java new file mode 100644 index 0000000000000..27d5261f755ef --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongHistogram.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * Record arbitrary values that are summarized statistically, useful for percentiles and histograms. 
+ */ +public interface LongHistogram extends Instrument { + /** + * Record a sample for the measured item + * @param value + */ + void record(long value); + + /** + * Record a sample for the measured item + * @param attributes key-value pairs to associate with the current sample + */ + void record(long value, Map attributes); + + /** + * Noop histogram for tests + */ + LongHistogram NOOP = new LongHistogram() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void record(long value) { + + } + + @Override + public void record(long value, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongUpDownCounter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongUpDownCounter.java new file mode 100644 index 0000000000000..f62030da8f6bd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongUpDownCounter.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +/** + * A counter that supports decreasing and increasing values. + * Useful for capturing the number of requests in a queue. + */ +public interface LongUpDownCounter extends Instrument { + /** + * Add to the counter + * @param inc may be negative. + */ + void add(long inc); + + /** + * Add to the counter + * @param inc may be negative. 
+ * @param attributes key-value pairs to associate with this increment + */ + void add(long inc, Map attributes); + + /** + * Noop counter for use in tests + */ + LongUpDownCounter NOOP = new LongUpDownCounter() { + @Override + public String getName() { + return "noop"; + } + + @Override + public void add(long inc) { + + } + + @Override + public void add(long inc, Map attributes) { + + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/Meter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/Meter.java new file mode 100644 index 0000000000000..77bbf6f673fd3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/Meter.java @@ -0,0 +1,228 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +/** + * Container for metering instruments. Meters with the same name and type (DoubleCounter, etc) can + * only be registered once. + * TODO(stu): describe name, unit and description + */ +public interface Meter { + /** + * Register a {@link DoubleCounter}. The returned object may be reused. + * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + DoubleCounter registerDoubleCounter(String name, String description, String unit); + + /** + * Retrieved a previously registered {@link DoubleCounter}. + * @param name name of the counter + * @return the registered meter. + */ + DoubleCounter getDoubleCounter(String name); + + /** + * Register a {@link DoubleUpDownCounter}. The returned object may be reused. 
+ * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit); + + /** + * Retrieved a previously registered {@link DoubleUpDownCounter}. + * @param name name of the counter + * @return the registered meter. + */ + DoubleUpDownCounter getDoubleUpDownCounter(String name); + + /** + * Register a {@link DoubleGauge}. The returned object may be reused. + * @param name name of the gauge + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + DoubleGauge registerDoubleGauge(String name, String description, String unit); + + /** + * Retrieved a previously registered {@link DoubleGauge}. + * @param name name of the gauge + * @return the registered meter. + */ + DoubleGauge getDoubleGauge(String name); + + /** + * Register a {@link DoubleHistogram}. The returned object may be reused. + * @param name name of the histogram + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + DoubleHistogram registerDoubleHistogram(String name, String description, String unit); + + /** + * Retrieved a previously registered {@link DoubleHistogram}. + * @param name name of the histogram + * @return the registered meter. + */ + DoubleHistogram getDoubleHistogram(String name); + + /** + * Register a {@link LongCounter}. The returned object may be reused. + * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + LongCounter registerLongCounter(String name, String description, String unit); + + /** + * Retrieved a previously registered {@link LongCounter}. + * @param name name of the counter + * @return the registered meter. 
+ */ + LongCounter getLongCounter(String name); + + /** + * Register a {@link LongUpDownCounter}. The returned object may be reused. + * @param name name of the counter + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit); + + /** + * Retrieved a previously registered {@link LongUpDownCounter}. + * @param name name of the counter + * @return the registered meter. + */ + LongUpDownCounter getLongUpDownCounter(String name); + + /** + * Register a {@link LongGauge}. The returned object may be reused. + * @param name name of the gauge + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + LongGauge registerLongGauge(String name, String description, String unit); + + /** + * Retrieved a previously registered {@link LongGauge}. + * @param name name of the gauge + * @return the registered meter. + */ + LongGauge getLongGauge(String name); + + /** + * Register a {@link LongHistogram}. The returned object may be reused. + * @param name name of the histogram + * @param description description of purpose + * @param unit the unit (bytes, sec, hour) + * @return the registered meter. + */ + LongHistogram registerLongHistogram(String name, String description, String unit); + + /** + * Retrieved a previously registered {@link LongHistogram}. + * @param name name of the histogram + * @return the registered meter. 
+ */ + LongHistogram getLongHistogram(String name); + + /** + * Noop implementation for tests + */ + Meter NOOP = new Meter() { + @Override + public DoubleCounter registerDoubleCounter(String name, String description, String unit) { + return DoubleCounter.NOOP; + } + + @Override + public DoubleCounter getDoubleCounter(String name) { + return DoubleCounter.NOOP; + } + + public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) { + return DoubleUpDownCounter.NOOP; + } + + @Override + public DoubleUpDownCounter getDoubleUpDownCounter(String name) { + return DoubleUpDownCounter.NOOP; + } + + @Override + public DoubleGauge registerDoubleGauge(String name, String description, String unit) { + return DoubleGauge.NOOP; + } + + @Override + public DoubleGauge getDoubleGauge(String name) { + return DoubleGauge.NOOP; + } + + @Override + public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) { + return DoubleHistogram.NOOP; + } + + @Override + public DoubleHistogram getDoubleHistogram(String name) { + return DoubleHistogram.NOOP; + } + + @Override + public LongCounter registerLongCounter(String name, String description, String unit) { + return LongCounter.NOOP; + } + + @Override + public LongCounter getLongCounter(String name) { + return LongCounter.NOOP; + } + + @Override + public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) { + return LongUpDownCounter.NOOP; + } + + @Override + public LongUpDownCounter getLongUpDownCounter(String name) { + return LongUpDownCounter.NOOP; + } + + @Override + public LongGauge registerLongGauge(String name, String description, String unit) { + return LongGauge.NOOP; + } + + @Override + public LongGauge getLongGauge(String name) { + return LongGauge.NOOP; + } + + @Override + public LongHistogram registerLongHistogram(String name, String description, String unit) { + return LongHistogram.NOOP; + } + + @Override + public 
LongHistogram getLongHistogram(String name) { + return LongHistogram.NOOP; + } + }; +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java index 31e973a9e9268..de28a7c7bd99d 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java @@ -9,8 +9,8 @@ package org.elasticsearch.action.admin.indices.stats; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -107,7 +107,7 @@ public void testRefreshListeners() throws Exception { createIndex("test", Settings.builder().put("refresh_interval", -1).build()); // Index a document asynchronously so the request will only return when document is refreshed - ActionFuture index = client().prepareIndex("test") + ActionFuture index = client().prepareIndex("test") .setId("test") .setSource("test", "test") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index 9acb63db2cc90..befea803e6fa0 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -590,7 +590,6 @@ public void testToXContent() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100005") public void 
testSerialization() throws IOException { SearchResponse searchResponse = createTestItem(false); SearchResponse deserialized = copyWriteable( diff --git a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java index f3d2fafd2e3c3..bb4aefc0388e6 100644 --- a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java +++ b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -86,7 +87,7 @@ public void testReadBlobRangeCorrectlySkipBytes() throws IOException { final long start = randomLongBetween(0L, Math.max(0L, blobData.length - 1)); final long length = randomLongBetween(1L, blobData.length - start); - try (InputStream stream = container.readBlob(blobName, start, length)) { + try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, blobName, start, length)) { assertThat(totalBytesRead.get(), equalTo(0L)); assertThat(Streams.consumeFully(stream), equalTo(length)); assertThat(totalBytesRead.get(), equalTo(length)); @@ -118,11 +119,11 @@ public void testDeleteIgnoringIfNotExistsDoesNotThrowFileNotFound() throws IOExc path ); - container.deleteBlobsIgnoringIfNotExists(List.of(blobName).listIterator()); + container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, List.of(blobName).listIterator()); // Should not throw exception - container.deleteBlobsIgnoringIfNotExists(List.of(blobName).listIterator()); + container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, 
List.of(blobName).listIterator()); - assertFalse(container.blobExists(blobName)); + assertFalse(container.blobExists(OperationPurpose.SNAPSHOT, blobName)); } private static BytesReference getBytesAsync(Consumer> consumer) { @@ -149,10 +150,11 @@ public void testCompareAndExchange() throws Exception { for (int i = 0; i < 5; i++) { switch (between(1, 4)) { - case 1 -> assertEquals(expectedValue.get(), getBytesAsync(l -> container.getRegister(key, l))); + case 1 -> assertEquals(expectedValue.get(), getBytesAsync(l -> container.getRegister(OperationPurpose.SNAPSHOT, key, l))); case 2 -> assertFalse( getAsync( l -> container.compareAndSetRegister( + OperationPurpose.SNAPSHOT, key, randomValueOtherThan(expectedValue.get(), () -> new BytesArray(randomByteArrayOfLength(8))), new BytesArray(randomByteArrayOfLength(8)), @@ -164,6 +166,7 @@ public void testCompareAndExchange() throws Exception { expectedValue.get(), getBytesAsync( l -> container.compareAndExchangeRegister( + OperationPurpose.SNAPSHOT, key, randomValueOtherThan(expectedValue.get(), () -> new BytesArray(randomByteArrayOfLength(8))), new BytesArray(randomByteArrayOfLength(8)), @@ -178,20 +181,26 @@ public void testCompareAndExchange() throws Exception { final var newValue = new BytesArray(randomByteArrayOfLength(8)); if (randomBoolean()) { - assertTrue(getAsync(l -> container.compareAndSetRegister(key, expectedValue.get(), newValue, l))); + assertTrue( + getAsync(l -> container.compareAndSetRegister(OperationPurpose.SNAPSHOT, key, expectedValue.get(), newValue, l)) + ); } else { assertEquals( expectedValue.get(), - getBytesAsync(l -> container.compareAndExchangeRegister(key, expectedValue.get(), newValue, l)) + getBytesAsync( + l -> container.compareAndExchangeRegister(OperationPurpose.SNAPSHOT, key, expectedValue.get(), newValue, l) + ) ); } expectedValue.set(newValue); } - container.writeBlob(key, new BytesArray(new byte[17]), false); + container.writeBlob(OperationPurpose.SNAPSHOT, key, new BytesArray(new 
byte[17]), false); expectThrows( IllegalStateException.class, - () -> getBytesAsync(l -> container.compareAndExchangeRegister(key, expectedValue.get(), BytesArray.EMPTY, l)) + () -> getBytesAsync( + l -> container.compareAndExchangeRegister(OperationPurpose.SNAPSHOT, key, expectedValue.get(), BytesArray.EMPTY, l) + ) ); } @@ -225,15 +234,25 @@ private static void checkAtomicWrite() throws IOException { BlobPath.EMPTY, path ); - container.writeBlobAtomic(blobName, new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), true); + container.writeBlobAtomic( + OperationPurpose.SNAPSHOT, + blobName, + new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), + true + ); final var blobData = new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))); - container.writeBlobAtomic(blobName, blobData, false); - assertEquals(blobData, Streams.readFully(container.readBlob(blobName))); + container.writeBlobAtomic(OperationPurpose.SNAPSHOT, blobName, blobData, false); + assertEquals(blobData, Streams.readFully(container.readBlob(OperationPurpose.SNAPSHOT, blobName))); expectThrows( FileAlreadyExistsException.class, - () -> container.writeBlobAtomic(blobName, new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), true) + () -> container.writeBlobAtomic( + OperationPurpose.SNAPSHOT, + blobName, + new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), + true + ) ); - for (String blob : container.listBlobs().keySet()) { + for (String blob : container.listBlobs(OperationPurpose.SNAPSHOT).keySet()) { assertFalse("unexpected temp blob [" + blob + "]", FsBlobContainer.isTempBlobName(blob)); } } diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index 7e01a3d714a24..5c35733feedef 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.index.fieldstats; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.QueryBuilders; @@ -95,7 +95,7 @@ private void refreshIndex() { } private void indexDocument(String id, String sValue) { - IndexResponse response = client().prepareIndex("index").setId(id).setSource("s", sValue).get(); + DocWriteResponse response = client().prepareIndex("index").setId(id).setSource("s", sValue).get(); assertThat(response.status(), anyOf(equalTo(RestStatus.OK), equalTo(RestStatus.CREATED))); } } diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index 624ad6a9fc7da..b64cd6adab7f3 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Numbers; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -230,7 +231,8 @@ public void testCorruptIndexLatestFile() throws Exception { System.arraycopy(generationBytes, 0, buffer, 0, 8); for (int i = 0; i < 16; i++) { - repository.blobContainer().writeBlob(BlobStoreRepository.INDEX_LATEST_BLOB, new BytesArray(buffer, 0, i), false); + 
repository.blobContainer() + .writeBlob(OperationPurpose.SNAPSHOT, BlobStoreRepository.INDEX_LATEST_BLOB, new BytesArray(buffer, 0, i), false); if (i == 8) { assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(generation)); } else { diff --git a/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java index 66eaeb2da9108..b5361a22226d1 100644 --- a/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.FilterBlobContainer; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.ClusterSettings; @@ -242,18 +243,24 @@ public BlobContainer blobContainer(BlobPath path) { final BlobContainer blobContainer = blobStore.blobContainer(path); return new FilterBlobContainer(blobContainer) { @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) - throws IOException { + public void writeBlob( + OperationPurpose purpose, + String blobName, + InputStream inputStream, + long blobSize, + boolean failIfAlreadyExists + ) throws IOException { if (canErrorForWriteBlob.get() && randomIntBetween(0, 10) == 0) { writeBlobErrored.set(true); throw new IOException("disk full"); } else { - super.writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); + super.writeBlob(purpose, blobName, inputStream, blobSize, failIfAlreadyExists); } } @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -262,7 +269,7 @@ public 
void writeMetadataBlob( if (shouldErrorForWriteMetadataBlob.get() && blobName.startsWith("snap-")) { throw new RuntimeException("snap file error"); } - super.writeMetadataBlob(blobName, failIfAlreadyExists, atomic, writer); + super.writeMetadataBlob(purpose, blobName, failIfAlreadyExists, atomic, writer); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 868832f61ef9c..279944c1b8347 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -21,9 +21,9 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClosePointInTimeAction; import org.elasticsearch.action.search.ClosePointInTimeRequest; @@ -338,9 +338,9 @@ public void run() { client().prepareIndex("index") .setSource("field", "value") .setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())) - .execute(new ActionListener() { + .execute(new ActionListener() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { semaphore.release(); } @@ -1012,7 +1012,7 @@ public void testCanMatch() throws Exception { ).canMatch() ); // the source can match and can be rewritten to a match_none, but not the alias filter - final IndexResponse response = client().prepareIndex("index").setSource("id", "1").get(); + final DocWriteResponse response = client().prepareIndex("index").setSource("id", "1").get(); 
assertEquals(RestStatus.CREATED, response.status()); searchRequest.indices("alias").source(new SearchSourceBuilder().query(new TermQueryBuilder("id", "1"))); assertFalse( @@ -1704,7 +1704,7 @@ public void testWaitOnRefresh() { searchRequest.setWaitForCheckpointsTimeout(TimeValue.timeValueSeconds(30)); searchRequest.setWaitForCheckpoints(Collections.singletonMap("index", new long[] { 0 })); - final IndexResponse response = client().prepareIndex("index").setSource("id", "1").get(); + final DocWriteResponse response = client().prepareIndex("index").setSource("id", "1").get(); assertEquals(RestStatus.CREATED, response.status()); SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()); @@ -1737,7 +1737,7 @@ public void testWaitOnRefreshFailsWithRefreshesDisabled() { searchRequest.setWaitForCheckpointsTimeout(TimeValue.timeValueSeconds(30)); searchRequest.setWaitForCheckpoints(Collections.singletonMap("index", new long[] { 0 })); - final IndexResponse response = client().prepareIndex("index").setSource("id", "1").get(); + final DocWriteResponse response = client().prepareIndex("index").setSource("id", "1").get(); assertEquals(RestStatus.CREATED, response.status()); SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()); @@ -1773,7 +1773,7 @@ public void testWaitOnRefreshFailsIfCheckpointNotIndexed() { searchRequest.setWaitForCheckpointsTimeout(TimeValue.timeValueMillis(randomIntBetween(10, 100))); searchRequest.setWaitForCheckpoints(Collections.singletonMap("index", new long[] { 1 })); - final IndexResponse response = client().prepareIndex("index").setSource("id", "1").get(); + final DocWriteResponse response = client().prepareIndex("index").setSource("id", "1").get(); assertEquals(RestStatus.CREATED, response.status()); SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()); @@ -1810,7 +1810,7 @@ public void testWaitOnRefreshTimeout() { 
searchRequest.setWaitForCheckpointsTimeout(TimeValue.timeValueMillis(randomIntBetween(10, 100))); searchRequest.setWaitForCheckpoints(Collections.singletonMap("index", new long[] { 0 })); - final IndexResponse response = client().prepareIndex("index").setSource("id", "1").get(); + final DocWriteResponse response = client().prepareIndex("index").setSource("id", "1").get(); assertEquals(RestStatus.CREATED, response.status()); SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()); diff --git a/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatTests.java b/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatTests.java index a65d50e897d12..f9ec4786d2854 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; @@ -113,7 +114,7 @@ public void testCompressionIsApplied() throws IOException { BlobObj blobObj = new BlobObj(veryRedundantText.toString()); checksumFormat.write(blobObj, blobContainer, "blob-comp", true); checksumFormat.write(blobObj, blobContainer, "blob-not-comp", false); - Map blobs = blobContainer.listBlobsByPrefix("blob-"); + Map blobs = blobContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "blob-"); assertEquals(blobs.size(), 2); assertThat(blobs.get("blob-not-comp").length(), greaterThan(blobs.get("blob-comp").length())); } @@ -146,8 +147,8 @@ protected BlobStore createTestBlobStore() throws IOException { } protected void randomCorruption(BlobContainer blobContainer, String 
blobName) throws IOException { - final byte[] buffer = new byte[(int) blobContainer.listBlobsByPrefix(blobName).get(blobName).length()]; - try (InputStream inputStream = blobContainer.readBlob(blobName)) { + final byte[] buffer = new byte[(int) blobContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, blobName).get(blobName).length()]; + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, blobName)) { Streams.readFully(inputStream, buffer); } final BytesArray corruptedBytes; @@ -163,7 +164,7 @@ protected void randomCorruption(BlobContainer blobContainer, String blobName) th // another sequence of 8 zero bytes anywhere in the file, let alone such a sequence followed by a correct checksum. corruptedBytes = new BytesArray(buffer, 0, location); } - blobContainer.writeBlob(blobName, corruptedBytes, false); + blobContainer.writeBlob(OperationPurpose.SNAPSHOT, blobName, corruptedBytes, false); } } diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java index 53ad4188b6ada..191ce130805a8 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java @@ -238,7 +238,7 @@ public void testClosesChannelOnErrorInHandshake() throws Exception { mockAppender.addExpectation( new MockLogAppender.SeenEventExpectation( "expected message", - InboundHandler.class.getCanonicalName(), + EXPECTED_LOGGER_NAME, Level.WARN, "error processing handshake version" ) @@ -275,6 +275,12 @@ public void testClosesChannelOnErrorInHandshake() throws Exception { } } + /** + * This logger is mentioned in the docs by name, so we cannot rename it without adjusting the docs. Thus we fix the expected logger + * name in this string constant rather than using {@code InboundHandler.class.getCanonicalName()}. 
+ */ + private static final String EXPECTED_LOGGER_NAME = "org.elasticsearch.transport.InboundHandler"; + public void testLogsSlowInboundProcessing() throws Exception { final MockLogAppender mockAppender = new MockLogAppender(); mockAppender.start(); @@ -286,12 +292,7 @@ public void testLogsSlowInboundProcessing() throws Exception { final TransportVersion remoteVersion = TransportVersion.current(); mockAppender.addExpectation( - new MockLogAppender.SeenEventExpectation( - "expected slow request", - InboundHandler.class.getCanonicalName(), - Level.WARN, - "handling request " - ) + new MockLogAppender.SeenEventExpectation("expected slow request", EXPECTED_LOGGER_NAME, Level.WARN, "handling request ") ); final long requestId = randomNonNegativeLong(); @@ -318,12 +319,7 @@ public void testLogsSlowInboundProcessing() throws Exception { mockAppender.assertAllExpectationsMatched(); mockAppender.addExpectation( - new MockLogAppender.SeenEventExpectation( - "expected slow response", - InboundHandler.class.getCanonicalName(), - Level.WARN, - "handling response " - ) + new MockLogAppender.SeenEventExpectation("expected slow response", EXPECTED_LOGGER_NAME, Level.WARN, "handling response ") ); final long responseId = randomNonNegativeLong(); diff --git a/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java index c47ea906f38df..9896dbf4a861b 100644 --- a/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java @@ -317,16 +317,17 @@ public void onResponseSent(long requestId, String action, Exception error) { assertEquals("header_value", header.getHeaders().v1().get("header")); } + /** + * This logger is mentioned in the docs by name, so we cannot rename it without adjusting the docs. 
Thus we fix the expected logger + * name in this string constant rather than using {@code OutboundHandler.class.getCanonicalName()}. + */ + private static final String EXPECTED_LOGGER_NAME = "org.elasticsearch.transport.OutboundHandler"; + public void testSlowLogOutboundMessage() throws Exception { final MockLogAppender mockAppender = new MockLogAppender(); mockAppender.start(); mockAppender.addExpectation( - new MockLogAppender.SeenEventExpectation( - "expected message", - OutboundHandler.class.getCanonicalName(), - Level.WARN, - "sending transport message " - ) + new MockLogAppender.SeenEventExpectation("expected message", EXPECTED_LOGGER_NAME, Level.WARN, "sending transport message ") ); final Logger outboundHandlerLogger = LogManager.getLogger(OutboundHandler.class); Loggers.addAppender(outboundHandlerLogger, mockAppender); diff --git a/test/external-modules/latency-simulating-directory/src/main/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepository.java b/test/external-modules/latency-simulating-directory/src/main/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepository.java index c184dca3887bd..937bdf098cb56 100644 --- a/test/external-modules/latency-simulating-directory/src/main/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepository.java +++ b/test/external-modules/latency-simulating-directory/src/main/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepository.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.FilterBlobContainer; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; @@ -52,8 +53,8 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - 
public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { - fsBlobStore.deleteBlobsIgnoringIfNotExists(blobNames); + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + fsBlobStore.deleteBlobsIgnoringIfNotExists(purpose, blobNames); } @Override @@ -70,15 +71,15 @@ private class LatencySimulatingBlobContainer extends FilterBlobContainer { } @Override - public InputStream readBlob(String blobName) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { simulator.run(); - return super.readBlob(blobName); + return super.readBlob(purpose, blobName); } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { simulator.run(); - return super.readBlob(blobName, position, length); + return super.readBlob(purpose, blobName, position, length); } @Override diff --git a/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/GeoIpHttpFixture.java b/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/GeoIpHttpFixture.java index 6593bce0944fd..5cbfd750498f6 100644 --- a/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/GeoIpHttpFixture.java +++ b/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/GeoIpHttpFixture.java @@ -25,6 +25,7 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardCopyOption; public class GeoIpHttpFixture extends ExternalResource { @@ -106,15 +107,25 @@ protected void after() { } private void copyFiles() throws Exception { - Files.copy(GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-ASN.tgz"), source.resolve("GeoLite2-ASN.tgz")); - Files.copy(GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-City.mmdb"), 
source.resolve("GeoLite2-City.mmdb")); + Files.copy( + GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-ASN.tgz"), + source.resolve("GeoLite2-ASN.tgz"), + StandardCopyOption.REPLACE_EXISTING + ); + Files.copy( + GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-City.mmdb"), + source.resolve("GeoLite2-City.mmdb"), + StandardCopyOption.REPLACE_EXISTING + ); Files.copy( GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-Country.mmdb"), - source.resolve("GeoLite2-Country.mmdb") + source.resolve("GeoLite2-Country.mmdb"), + StandardCopyOption.REPLACE_EXISTING ); Files.copy( GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/MyCustomGeoLite2-City.mmdb"), - source.resolve("MyCustomGeoLite2-City.mmdb") + source.resolve("MyCustomGeoLite2-City.mmdb"), + StandardCopyOption.REPLACE_EXISTING ); new GeoIpCli().main( diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 02e8cfd7f16fc..0a0592b5a01f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -678,6 +678,9 @@ public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws Circu while (true) { long old = used.get(); long total = old + bytes; + if (total < 0) { + throw new AssertionError("total must be >= 0 but was [" + total + "]"); + } if (total > max.getBytes()) { throw new CircuitBreakingException(ERROR_MESSAGE, bytes, max.getBytes(), Durability.TRANSIENT); } @@ -689,7 +692,10 @@ public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws Circu @Override public void addWithoutBreaking(long bytes) { - used.addAndGet(bytes); + long total = used.addAndGet(bytes); + if (total < 0) { + throw new AssertionError("total must be >= 0 but was [" + total + "]"); + } } @Override diff --git 
a/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java b/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java index 15ffa52569d00..bd5f974a5f800 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java @@ -15,6 +15,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerStats; import org.elasticsearch.test.ESTestCase; +import java.util.concurrent.atomic.AtomicLong; + /** * {@link CircuitBreakerService} that fails one twentieth of the time when you * add bytes. This is useful to make sure code responds sensibly to circuit @@ -27,31 +29,32 @@ public class CrankyCircuitBreakerService extends CircuitBreakerService { public static final String ERROR_MESSAGE = "cranky breaker"; private final CircuitBreaker breaker = new CircuitBreaker() { - @Override - public void circuitBreak(String fieldName, long bytesNeeded) { + private final AtomicLong used = new AtomicLong(); - } + @Override + public void circuitBreak(String fieldName, long bytesNeeded) {} @Override public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { if (ESTestCase.random().nextInt(20) == 0) { throw new CircuitBreakingException(ERROR_MESSAGE, Durability.PERMANENT); } + used.addAndGet(bytes); } @Override public void addWithoutBreaking(long bytes) { - + used.addAndGet(bytes); } @Override public long getUsed() { - return 0; + return used.get(); } @Override public long getLimit() { - return 0; + return Long.MAX_VALUE; } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java index 13b7407783ba7..38a44bae48543 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.settings.SecureSettings; import org.elasticsearch.common.settings.Settings; @@ -68,7 +69,9 @@ public void tearDown() throws Exception { private void deleteAndAssertEmpty(BlobPath path) { final BlobStoreRepository repo = getRepository(); final PlainActionFuture future = PlainActionFuture.newFuture(); - repo.threadPool().generic().execute(ActionRunnable.run(future, () -> repo.blobStore().blobContainer(path).delete())); + repo.threadPool() + .generic() + .execute(ActionRunnable.run(future, () -> repo.blobStore().blobContainer(path).delete(OperationPurpose.SNAPSHOT))); future.actionGet(); final BlobPath parent = path.parent(); if (parent == null) { @@ -121,11 +124,29 @@ public void testListChildren() throws Exception { genericExec.execute(ActionRunnable.run(future, () -> { final BlobStore blobStore = repo.blobStore(); blobStore.blobContainer(repo.basePath().add("foo")) - .writeBlob("nested-blob", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); + .writeBlob( + OperationPurpose.SNAPSHOT, + "nested-blob", + new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), + testBlobLen, + false + ); blobStore.blobContainer(repo.basePath().add("foo").add("nested")) - .writeBlob("bar", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); + .writeBlob( + OperationPurpose.SNAPSHOT, + "bar", + new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), + testBlobLen, + false + ); 
blobStore.blobContainer(repo.basePath().add("foo").add("nested2")) - .writeBlob("blub", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); + .writeBlob( + OperationPurpose.SNAPSHOT, + "blub", + new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), + testBlobLen, + false + ); })); future.actionGet(); assertChildren(repo.basePath(), Collections.singleton("foo")); @@ -207,9 +228,10 @@ private void createDanglingIndex(final BlobStoreRepository repo, final Executor genericExec.execute(ActionRunnable.run(future, () -> { final BlobStore blobStore = repo.blobStore(); blobStore.blobContainer(repo.basePath().add("indices").add("foo")) - .writeBlob("bar", new ByteArrayInputStream(new byte[3]), 3, false); + .writeBlob(OperationPurpose.SNAPSHOT, "bar", new ByteArrayInputStream(new byte[3]), 3, false); for (String prefix : Arrays.asList("snap-", "meta-")) { - blobStore.blobContainer(repo.basePath()).writeBlob(prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); + blobStore.blobContainer(repo.basePath()) + .writeBlob(OperationPurpose.SNAPSHOT, prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); } })); future.get(); @@ -217,10 +239,10 @@ private void createDanglingIndex(final BlobStoreRepository repo, final Executor final PlainActionFuture corruptionFuture = PlainActionFuture.newFuture(); genericExec.execute(ActionRunnable.supply(corruptionFuture, () -> { final BlobStore blobStore = repo.blobStore(); - return blobStore.blobContainer(repo.basePath().add("indices")).children().containsKey("foo") - && blobStore.blobContainer(repo.basePath().add("indices").add("foo")).blobExists("bar") - && blobStore.blobContainer(repo.basePath()).blobExists("meta-foo.dat") - && blobStore.blobContainer(repo.basePath()).blobExists("snap-foo.dat"); + return blobStore.blobContainer(repo.basePath().add("indices")).children(OperationPurpose.SNAPSHOT).containsKey("foo") + && 
blobStore.blobContainer(repo.basePath().add("indices").add("foo")).blobExists(OperationPurpose.SNAPSHOT, "bar") + && blobStore.blobContainer(repo.basePath()).blobExists(OperationPurpose.SNAPSHOT, "meta-foo.dat") + && blobStore.blobContainer(repo.basePath()).blobExists(OperationPurpose.SNAPSHOT, "snap-foo.dat"); })); assertTrue(corruptionFuture.get()); } @@ -240,7 +262,9 @@ private Set listChildren(BlobPath path) { final BlobStoreRepository repository = getRepository(); repository.threadPool() .generic() - .execute(ActionRunnable.supply(future, () -> repository.blobStore().blobContainer(path).children().keySet())); + .execute( + ActionRunnable.supply(future, () -> repository.blobStore().blobContainer(path).children(OperationPurpose.SNAPSHOT).keySet()) + ); return future.actionGet(); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java index 0baf13f78ceb2..e23b26c73a811 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java @@ -14,6 +14,7 @@ import org.apache.http.ConnectionClosedException; import org.apache.http.HttpStatus; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.unit.ByteSizeValue; @@ -93,9 +94,9 @@ public void testReadNonexistentBlobThrowsNoSuchFileException() { final int length = randomIntBetween(1, Math.toIntExact(Math.min(Integer.MAX_VALUE, MAX_RANGE_VAL - position))); final Exception exception = expectThrows(NoSuchFileException.class, () -> { if (randomBoolean()) { - 
Streams.readFully(blobContainer.readBlob("read_nonexistent_blob")); + Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob")); } else { - Streams.readFully(blobContainer.readBlob("read_nonexistent_blob", 0, 1)); + Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob", 0, 1)); } }); final String fullBlobPath = blobContainer.path().buildAsString() + "read_nonexistent_blob"; @@ -103,7 +104,7 @@ public void testReadNonexistentBlobThrowsNoSuchFileException() { assertThat( expectThrows( NoSuchFileException.class, - () -> Streams.readFully(blobContainer.readBlob("read_nonexistent_blob", position, length)) + () -> Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob", position, length)) ).getMessage().toLowerCase(Locale.ROOT), containsString("blob object [" + fullBlobPath + "] not found") ); @@ -145,7 +146,7 @@ public void testReadBlobWithRetries() throws Exception { } }); - try (InputStream inputStream = blobContainer.readBlob("read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_max_retries")) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -211,7 +212,7 @@ public void testReadRangeBlobWithRetries() throws Exception { final int position = randomIntBetween(0, bytes.length - 1); final int length = randomIntBetween(0, randomBoolean() ? 
bytes.length : Integer.MAX_VALUE); - try (InputStream inputStream = blobContainer.readBlob("read_range_blob_max_retries", position, length)) { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_range_blob_max_retries", position, length)) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -251,7 +252,7 @@ public void testReadBlobWithReadTimeouts() { Exception exception = expectThrows( unresponsiveExceptionType(), - () -> Streams.readFully(blobContainer.readBlob("read_blob_unresponsive")) + () -> Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_unresponsive")) ); assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); assertThat(exception.getCause(), instanceOf(SocketTimeoutException.class)); @@ -268,8 +269,8 @@ public void testReadBlobWithReadTimeouts() { exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob("read_blob_incomplete") - : blobContainer.readBlob("read_blob_incomplete", position, length) + ? 
blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_incomplete") + : blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_incomplete", position, length) ) { Streams.readFully(stream); } @@ -297,9 +298,9 @@ public void testReadBlobWithNoHttpResponse() { Exception exception = expectThrows(unresponsiveExceptionType(), () -> { if (randomBoolean()) { - Streams.readFully(blobContainer.readBlob("read_blob_no_response")); + Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_no_response")); } else { - Streams.readFully(blobContainer.readBlob("read_blob_no_response", 0, 1)); + Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_no_response", 0, 1)); } }); assertThat( @@ -322,8 +323,8 @@ public void testReadBlobWithPrematureConnectionClose() { final Exception exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob("read_blob_incomplete", 0, 1) - : blobContainer.readBlob("read_blob_incomplete") + ? 
blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_incomplete", 0, 1) + : blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_incomplete") ) { Streams.readFully(stream); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index d4d7fdcb40c65..cbc6b58cfdb28 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -103,7 +104,7 @@ public static PlainActionFuture assertConsistencyAsync(BlobStore try { final BlobContainer blobContainer = repository.blobContainer(); final long latestGen; - try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob("index.latest"))) { + try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "index.latest"))) { latestGen = inputStream.readLong(); } catch (NoSuchFileException e) { throw new AssertionError("Could not find index.latest blob for repo [" + repository + "]"); @@ -111,7 +112,7 @@ public static PlainActionFuture assertConsistencyAsync(BlobStore assertIndexGenerations(blobContainer, latestGen); final RepositoryData repositoryData; try ( - InputStream blob = blobContainer.readBlob(BlobStoreRepository.INDEX_FILE_PREFIX + latestGen); + InputStream blob = blobContainer.readBlob(OperationPurpose.SNAPSHOT, BlobStoreRepository.INDEX_FILE_PREFIX + latestGen); 
XContentParser parser = XContentType.JSON.xContent() .createParser(XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), blob) ) { @@ -152,7 +153,7 @@ public void onFailure(Exception e) { } private static void assertIndexGenerations(BlobContainer repoRoot, long latestGen) throws IOException { - final long[] indexGenerations = repoRoot.listBlobsByPrefix(BlobStoreRepository.INDEX_FILE_PREFIX) + final long[] indexGenerations = repoRoot.listBlobsByPrefix(OperationPurpose.SNAPSHOT, BlobStoreRepository.INDEX_FILE_PREFIX) .keySet() .stream() .map(s -> s.replace(BlobStoreRepository.INDEX_FILE_PREFIX, "")) @@ -164,12 +165,12 @@ private static void assertIndexGenerations(BlobContainer repoRoot, long latestGe } private static void assertShardIndexGenerations(BlobContainer repoRoot, ShardGenerations shardGenerations) throws IOException { - final BlobContainer indicesContainer = repoRoot.children().get("indices"); + final BlobContainer indicesContainer = repoRoot.children(OperationPurpose.SNAPSHOT).get("indices"); for (IndexId index : shardGenerations.indices()) { final List gens = shardGenerations.getGens(index); if (gens.isEmpty() == false) { - final BlobContainer indexContainer = indicesContainer.children().get(index.getId()); - final Map shardContainers = indexContainer.children(); + final BlobContainer indexContainer = indicesContainer.children(OperationPurpose.SNAPSHOT).get(index.getId()); + final Map shardContainers = indexContainer.children(OperationPurpose.SNAPSHOT); for (int i = 0; i < gens.size(); i++) { final ShardGeneration generation = gens.get(i); assertThat(generation, not(ShardGenerations.DELETED_SHARD_GEN)); @@ -177,7 +178,8 @@ private static void assertShardIndexGenerations(BlobContainer repoRoot, ShardGen final String shardId = Integer.toString(i); assertThat(shardContainers, hasKey(shardId)); assertThat( - shardContainers.get(shardId).listBlobsByPrefix(BlobStoreRepository.INDEX_FILE_PREFIX), + 
shardContainers.get(shardId) + .listBlobsByPrefix(OperationPurpose.SNAPSHOT, BlobStoreRepository.INDEX_FILE_PREFIX), hasKey(BlobStoreRepository.INDEX_FILE_PREFIX + generation) ); } @@ -188,13 +190,13 @@ private static void assertShardIndexGenerations(BlobContainer repoRoot, ShardGen private static void assertIndexUUIDs(BlobStoreRepository repository, RepositoryData repositoryData) throws IOException { final List expectedIndexUUIDs = repositoryData.getIndices().values().stream().map(IndexId::getId).toList(); - final BlobContainer indicesContainer = repository.blobContainer().children().get("indices"); + final BlobContainer indicesContainer = repository.blobContainer().children(OperationPurpose.SNAPSHOT).get("indices"); final List foundIndexUUIDs; if (indicesContainer == null) { foundIndexUUIDs = Collections.emptyList(); } else { // Skip Lucene MockFS extraN directory - foundIndexUUIDs = indicesContainer.children() + foundIndexUUIDs = indicesContainer.children(OperationPurpose.SNAPSHOT) .keySet() .stream() .filter(s -> s.startsWith("extra") == false) @@ -202,9 +204,9 @@ private static void assertIndexUUIDs(BlobStoreRepository repository, RepositoryD } assertThat(foundIndexUUIDs, containsInAnyOrder(expectedIndexUUIDs.toArray(Strings.EMPTY_ARRAY))); for (String indexId : foundIndexUUIDs) { - final Set indexMetaGenerationsFound = indicesContainer.children() + final Set indexMetaGenerationsFound = indicesContainer.children(OperationPurpose.SNAPSHOT) .get(indexId) - .listBlobsByPrefix(BlobStoreRepository.METADATA_PREFIX) + .listBlobsByPrefix(OperationPurpose.SNAPSHOT, BlobStoreRepository.METADATA_PREFIX) .keySet() .stream() .map(p -> p.replace(BlobStoreRepository.METADATA_PREFIX, "").replace(".dat", "")) @@ -229,7 +231,7 @@ private static void assertSnapshotUUIDs( final Collection snapshotIds = repositoryData.getSnapshotIds(); final List expectedSnapshotUUIDs = snapshotIds.stream().map(SnapshotId::getUUID).toList(); for (String prefix : new String[] { 
BlobStoreRepository.SNAPSHOT_PREFIX, BlobStoreRepository.METADATA_PREFIX }) { - final Collection foundSnapshotUUIDs = repoRoot.listBlobs() + final Collection foundSnapshotUUIDs = repoRoot.listBlobs(OperationPurpose.SNAPSHOT) .keySet() .stream() .filter(p -> p.startsWith(prefix)) @@ -238,12 +240,12 @@ private static void assertSnapshotUUIDs( assertThat(foundSnapshotUUIDs, containsInAnyOrder(expectedSnapshotUUIDs.toArray(Strings.EMPTY_ARRAY))); } - final BlobContainer indicesContainer = repository.getBlobContainer().children().get("indices"); + final BlobContainer indicesContainer = repository.getBlobContainer().children(OperationPurpose.SNAPSHOT).get("indices"); final Map indices; if (indicesContainer == null) { indices = Collections.emptyMap(); } else { - indices = indicesContainer.children(); + indices = indicesContainer.children(OperationPurpose.SNAPSHOT); } if (snapshotIds.isEmpty()) { listener.onResponse(null); @@ -296,7 +298,7 @@ private static void assertSnapshotInfosConsistency( assertThat(indices, hasKey(indexId.getId())); final BlobContainer indexContainer = indices.get(indexId.getId()); assertThat( - indexContainer.listBlobs(), + indexContainer.listBlobs(OperationPurpose.SNAPSHOT), hasKey( String.format( Locale.ROOT, @@ -306,7 +308,7 @@ private static void assertSnapshotInfosConsistency( ) ); final IndexMetadata indexMetadata = repository.getSnapshotIndexMetaData(repositoryData, snapshotId, indexId); - for (Map.Entry entry : indexContainer.children().entrySet()) { + for (Map.Entry entry : indexContainer.children(OperationPurpose.SNAPSHOT).entrySet()) { // Skip Lucene MockFS extraN directory if (entry.getKey().startsWith("extra")) { continue; @@ -320,7 +322,10 @@ private static void assertSnapshotInfosConsistency( final BlobContainer shardContainer = entry.getValue(); // TODO: we shouldn't be leaking empty shard directories when a shard (but not all of the index it belongs to) // becomes unreferenced. 
We should fix that and remove this conditional once its fixed. - if (shardContainer.listBlobs().keySet().stream().anyMatch(blob -> blob.startsWith("extra") == false)) { + if (shardContainer.listBlobs(OperationPurpose.SNAPSHOT) + .keySet() + .stream() + .anyMatch(blob -> blob.startsWith("extra") == false)) { final int impliedCount = shardId - 1; maxShardCountsSeen.compute( indexId, @@ -331,7 +336,7 @@ private static void assertSnapshotInfosConsistency( && snapshotInfo.shardFailures() .stream() .noneMatch(shardFailure -> shardFailure.index().equals(index) && shardFailure.shardId() == shardId)) { - final Map shardPathContents = shardContainer.listBlobs(); + final Map shardPathContents = shardContainer.listBlobs(OperationPurpose.SNAPSHOT); assertThat( shardPathContents, hasKey(String.format(Locale.ROOT, BlobStoreRepository.SNAPSHOT_NAME_FORMAT, snapshotId.getUUID())) @@ -370,7 +375,12 @@ public static void assertBlobsByPrefix(BlobStoreRepository repository, BlobPath final PlainActionFuture> future = PlainActionFuture.newFuture(); repository.threadPool() .generic() - .execute(ActionRunnable.supply(future, () -> repository.blobStore().blobContainer(path).listBlobsByPrefix(prefix))); + .execute( + ActionRunnable.supply( + future, + () -> repository.blobStore().blobContainer(path).listBlobsByPrefix(OperationPurpose.SNAPSHOT, prefix) + ) + ); Map foundBlobs = future.actionGet(); if (blobs.isEmpty()) { assertThat(foundBlobs.keySet(), empty()); diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index d1b211128c108..e2dee9e154391 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -21,6 +21,7 @@ import 
org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; @@ -111,7 +112,7 @@ public void testReadNonExistingPath() throws IOException { try (BlobStore store = newBlobStore()) { final BlobContainer container = store.blobContainer(BlobPath.EMPTY); expectThrows(NoSuchFileException.class, () -> { - try (InputStream is = container.readBlob("non-existing")) { + try (InputStream is = container.readBlob(OperationPurpose.SNAPSHOT, "non-existing")) { is.read(); } }); @@ -128,7 +129,7 @@ public void testWriteRead() throws IOException { data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); writeBlob(container, "foobar", new BytesArray(data), false); } - try (InputStream stream = container.readBlob("foobar")) { + try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, "foobar")) { BytesRefBuilder target = new BytesRefBuilder(); while (target.length() < data.length) { byte[] buffer = new byte[scaledRandomIntBetween(1, data.length - target.length())]; @@ -143,14 +144,14 @@ public void testWriteRead() throws IOException { assertEquals(data.length, target.length()); assertArrayEquals(data, Arrays.copyOfRange(target.bytes(), 0, target.length())); } - container.delete(); + container.delete(OperationPurpose.SNAPSHOT); } } public void testList() throws IOException { try (BlobStore store = newBlobStore()) { final BlobContainer container = store.blobContainer(BlobPath.EMPTY); - assertThat(container.listBlobs().size(), CoreMatchers.equalTo(0)); + assertThat(container.listBlobs(OperationPurpose.SNAPSHOT).size(), CoreMatchers.equalTo(0)); int numberOfFooBlobs = randomIntBetween(0, 10); int numberOfBarBlobs = randomIntBetween(3, 
20); Map generatedBlobs = new HashMap<>(); @@ -171,7 +172,7 @@ public void testList() throws IOException { generatedBlobs.put(name, (long) length); writeRandomBlob(container, name, length); - Map blobs = container.listBlobs(); + Map blobs = container.listBlobs(OperationPurpose.SNAPSHOT); assertThat(blobs.size(), CoreMatchers.equalTo(numberOfFooBlobs + numberOfBarBlobs)); for (Map.Entry generated : generatedBlobs.entrySet()) { BlobMetadata blobMetadata = blobs.get(generated.getKey()); @@ -180,10 +181,10 @@ public void testList() throws IOException { assertThat(blobMetadata.length(), CoreMatchers.equalTo(blobLengthFromContentLength(generated.getValue()))); } - assertThat(container.listBlobsByPrefix("foo-").size(), CoreMatchers.equalTo(numberOfFooBlobs)); - assertThat(container.listBlobsByPrefix("bar-").size(), CoreMatchers.equalTo(numberOfBarBlobs)); - assertThat(container.listBlobsByPrefix("baz-").size(), CoreMatchers.equalTo(0)); - container.delete(); + assertThat(container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "foo-").size(), CoreMatchers.equalTo(numberOfFooBlobs)); + assertThat(container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "bar-").size(), CoreMatchers.equalTo(numberOfBarBlobs)); + assertThat(container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "baz-").size(), CoreMatchers.equalTo(0)); + container.delete(OperationPurpose.SNAPSHOT); } } @@ -191,16 +192,18 @@ public void testDeleteBlobs() throws IOException { try (BlobStore store = newBlobStore()) { final List blobNames = Arrays.asList("foobar", "barfoo"); final BlobContainer container = store.blobContainer(BlobPath.EMPTY); - container.deleteBlobsIgnoringIfNotExists(blobNames.iterator()); // does not raise when blobs don't exist + container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNames.iterator()); // does not raise when blobs + // don't exist byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); final BytesArray bytesArray = new 
BytesArray(data); for (String blobName : blobNames) { writeBlob(container, blobName, bytesArray, randomBoolean()); } - assertEquals(container.listBlobs().size(), 2); - container.deleteBlobsIgnoringIfNotExists(blobNames.iterator()); - assertTrue(container.listBlobs().isEmpty()); - container.deleteBlobsIgnoringIfNotExists(blobNames.iterator()); // does not raise when blobs don't exist + assertEquals(container.listBlobs(OperationPurpose.SNAPSHOT).size(), 2); + container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNames.iterator()); + assertTrue(container.listBlobs(OperationPurpose.SNAPSHOT).isEmpty()); + container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNames.iterator()); // does not raise when blobs + // don't exist } } @@ -211,9 +214,9 @@ public static void writeBlob( boolean failIfAlreadyExists ) throws IOException { if (randomBoolean()) { - container.writeBlob(blobName, bytesArray, failIfAlreadyExists); + container.writeBlob(OperationPurpose.SNAPSHOT, blobName, bytesArray, failIfAlreadyExists); } else { - container.writeBlobAtomic(blobName, bytesArray, failIfAlreadyExists); + container.writeBlobAtomic(OperationPurpose.SNAPSHOT, blobName, bytesArray, failIfAlreadyExists); } } @@ -229,10 +232,10 @@ public void testContainerCreationAndDeletion() throws IOException { assertArrayEquals(readBlobFully(containerFoo, "test", data1.length), data1); assertArrayEquals(readBlobFully(containerBar, "test", data2.length), data2); - assertTrue(containerFoo.blobExists("test")); - assertTrue(containerBar.blobExists("test")); - containerBar.delete(); - containerFoo.delete(); + assertTrue(containerFoo.blobExists(OperationPurpose.SNAPSHOT, "test")); + assertTrue(containerBar.blobExists(OperationPurpose.SNAPSHOT, "test")); + containerBar.delete(OperationPurpose.SNAPSHOT); + containerFoo.delete(OperationPurpose.SNAPSHOT); } } @@ -244,7 +247,7 @@ public static byte[] writeRandomBlob(BlobContainer container, String name, int l public static byte[] 
readBlobFully(BlobContainer container, String name, int length) throws IOException { byte[] data = new byte[length]; - try (InputStream inputStream = container.readBlob(name)) { + try (InputStream inputStream = container.readBlob(OperationPurpose.SNAPSHOT, name)) { assertThat(Streams.readFully(inputStream, data), CoreMatchers.equalTo(length)); assertThat(inputStream.read(), CoreMatchers.equalTo(-1)); } @@ -260,7 +263,7 @@ public static byte[] randomBytes(int length) { } protected static void writeBlob(BlobContainer container, String blobName, BytesArray bytesArray) throws IOException { - container.writeBlob(blobName, bytesArray, true); + container.writeBlob(OperationPurpose.SNAPSHOT, blobName, bytesArray, true); } protected BlobStore newBlobStore() { @@ -476,7 +479,7 @@ public void testIndicesDeletedFromRepository() throws Exception { for (IndexId indexId : repositoryData.actionGet().getIndices().values()) { if (indexId.getName().equals("test-idx-3")) { - assertFalse(indicesBlobContainer.get().blobExists(indexId.getId())); // deleted index + assertFalse(indicesBlobContainer.get().blobExists(OperationPurpose.SNAPSHOT, indexId.getId())); // deleted index } } @@ -495,7 +498,7 @@ public void testBlobStoreBulkDeletion() throws Exception { for (int j = 0; j < numberOfBlobsPerContainer; j++) { byte[] bytes = randomBytes(randomInt(100)); String blobName = randomAlphaOfLength(10); - container.writeBlob(blobName, new BytesArray(bytes), false); + container.writeBlob(OperationPurpose.SNAPSHOT, blobName, new BytesArray(bytes), false); if (randomBoolean()) { blobsToDelete.add(containerPath.buildAsString() + blobName); } else { @@ -504,14 +507,14 @@ public void testBlobStoreBulkDeletion() throws Exception { } } - store.deleteBlobsIgnoringIfNotExists(blobsToDelete.iterator()); + store.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobsToDelete.iterator()); for (var containerEntry : expectedBlobsPerContainer.entrySet()) { BlobContainer blobContainer = 
store.blobContainer(containerEntry.getKey()); - Map blobsInContainer = blobContainer.listBlobs(); + Map blobsInContainer = blobContainer.listBlobs(OperationPurpose.SNAPSHOT); for (String expectedBlob : containerEntry.getValue()) { assertThat(blobsInContainer, hasKey(expectedBlob)); } - blobContainer.delete(); + blobContainer.delete(OperationPurpose.SNAPSHOT); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java index e0fd9d125134b..fd207a59e563f 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.IOUtils; @@ -113,7 +114,7 @@ public void testReadOnly() throws Exception { byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); writeBlob(container, "test", new BytesArray(data)); assertArrayEquals(readBlobFully(container, "test", data.length), data); - assertTrue(container.blobExists("test")); + assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "test")); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java b/test/framework/src/main/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java index 926f9dc2b2a8a..f6811de12bb60 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java +++ 
b/test/framework/src/main/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java @@ -10,9 +10,11 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import java.io.IOException; import java.util.Iterator; +import java.util.Map; public class BlobStoreWrapper implements BlobStore { @@ -28,8 +30,8 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { - delegate.deleteBlobsIgnoringIfNotExists(blobNames); + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + delegate.deleteBlobsIgnoringIfNotExists(purpose, blobNames); } @Override @@ -37,6 +39,11 @@ public void close() throws IOException { delegate.close(); } + @Override + public Map stats() { + return delegate.stats(); + } + protected BlobStore delegate() { return delegate; } diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/test/framework/src/main/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index fd6c8554a1860..d5b59ef3274ea 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.fs.FsBlobContainer; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.blobstore.support.FilterBlobContainer; @@ -526,7 +527,7 @@ protected BlobContainer wrapChild(BlobContainer 
child) { } @Override - public InputStream readBlob(String name) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String name) throws IOException { if (blockOnReadIndexMeta && name.startsWith(BlobStoreRepository.METADATA_PREFIX) && path().equals(basePath()) == false) { blockExecutionAndMaybeWait(name); } else if (path().equals(basePath()) @@ -537,70 +538,76 @@ public InputStream readBlob(String name) throws IOException { maybeReadErrorAfterBlock(name); maybeIOExceptionOrBlock(name); } - return super.readBlob(name); + return super.readBlob(purpose, name); } @Override - public InputStream readBlob(String name, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String name, long position, long length) throws IOException { maybeReadErrorAfterBlock(name); maybeIOExceptionOrBlock(name); - return super.readBlob(name, position, length); + return super.readBlob(purpose, name, position, length); } @Override - public DeleteResult delete() throws IOException { + public DeleteResult delete(OperationPurpose purpose) throws IOException { DeleteResult deleteResult = DeleteResult.ZERO; - for (BlobContainer child : children().values()) { - deleteResult = deleteResult.add(child.delete()); + for (BlobContainer child : children(purpose).values()) { + deleteResult = deleteResult.add(child.delete(purpose)); } - final Map blobs = listBlobs(); + final Map blobs = listBlobs(purpose); long deleteBlobCount = blobs.size(); long deleteByteCount = 0L; for (String blob : blobs.values().stream().map(BlobMetadata::name).toList()) { maybeIOExceptionOrBlock(blob); - deleteBlobsIgnoringIfNotExists(Iterators.single(blob)); + deleteBlobsIgnoringIfNotExists(purpose, Iterators.single(blob)); deleteByteCount += blobs.get(blob).length(); } blobStore().blobContainer(path().parent()) - .deleteBlobsIgnoringIfNotExists(Iterators.single(path().parts().get(path().parts().size() - 1))); + .deleteBlobsIgnoringIfNotExists(purpose, 
Iterators.single(path().parts().get(path().parts().size() - 1))); return deleteResult.add(deleteBlobCount, deleteByteCount); } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { final List names = new ArrayList<>(); blobNames.forEachRemaining(names::add); if (blockOnDeleteIndexN && names.stream().anyMatch(name -> name.startsWith(BlobStoreRepository.INDEX_FILE_PREFIX))) { blockExecutionAndMaybeWait("index-{N}"); } - super.deleteBlobsIgnoringIfNotExists(names.iterator()); + super.deleteBlobsIgnoringIfNotExists(purpose, names.iterator()); } @Override - public Map listBlobs() throws IOException { + public Map listBlobs(OperationPurpose purpose) throws IOException { maybeIOExceptionOrBlock(""); - return super.listBlobs(); + return super.listBlobs(purpose); } @Override - public Map children() throws IOException { + public Map children(OperationPurpose purpose) throws IOException { final Map res = new HashMap<>(); - for (Map.Entry entry : super.children().entrySet()) { + for (Map.Entry entry : super.children(purpose).entrySet()) { res.put(entry.getKey(), new MockBlobContainer(entry.getValue())); } return res; } @Override - public Map listBlobsByPrefix(String blobNamePrefix) throws IOException { + public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) throws IOException { maybeIOExceptionOrBlock(blobNamePrefix); - return super.listBlobsByPrefix(blobNamePrefix); + return super.listBlobsByPrefix(purpose, blobNamePrefix); } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + public void writeBlob( + OperationPurpose purpose, + String blobName, + InputStream inputStream, + long blobSize, + boolean failIfAlreadyExists + ) throws IOException { beforeWrite(blobName); - super.writeBlob(blobName, inputStream, 
blobSize, failIfAlreadyExists); + super.writeBlob(purpose, blobName, inputStream, blobSize, failIfAlreadyExists); if (RandomizedContext.current().getRandom().nextBoolean()) { // for network based repositories, the blob may have been written but we may still // get an error with the client connection, so an IOException here simulates this @@ -610,6 +617,7 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -620,7 +628,7 @@ public void writeMetadataBlob( } else { beforeWrite(blobName); } - super.writeMetadataBlob(blobName, failIfAlreadyExists, atomic, writer); + super.writeMetadataBlob(purpose, blobName, failIfAlreadyExists, atomic, writer); if (RandomizedContext.current().getRandom().nextBoolean()) { // for network based repositories, the blob may have been written but we may still // get an error with the client connection, so an IOException here simulates this @@ -640,21 +648,25 @@ private void beforeWrite(String blobName) throws IOException { } @Override - public void writeBlobAtomic(final String blobName, final BytesReference bytes, final boolean failIfAlreadyExists) - throws IOException { + public void writeBlobAtomic( + final OperationPurpose purpose, + final String blobName, + final BytesReference bytes, + final boolean failIfAlreadyExists + ) throws IOException { final Random random = beforeAtomicWrite(blobName); if ((delegate() instanceof FsBlobContainer) && (random.nextBoolean())) { // Simulate a failure between the write and move operation in FsBlobContainer final String tempBlobName = FsBlobContainer.tempBlobName(blobName); - super.writeBlob(tempBlobName, bytes, failIfAlreadyExists); + super.writeBlob(purpose, tempBlobName, bytes, failIfAlreadyExists); maybeIOExceptionOrBlock(blobName); final FsBlobContainer fsBlobContainer = (FsBlobContainer) delegate(); - 
fsBlobContainer.moveBlobAtomic(tempBlobName, blobName, failIfAlreadyExists); + fsBlobContainer.moveBlobAtomic(purpose, tempBlobName, blobName, failIfAlreadyExists); } else { // Atomic write since it is potentially supported // by the delegating blob container maybeIOExceptionOrBlock(blobName); - super.writeBlobAtomic(blobName, bytes, failIfAlreadyExists); + super.writeBlobAtomic(purpose, blobName, bytes, failIfAlreadyExists); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java index 9829e40088829..d23b79ed0cde2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java @@ -14,11 +14,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.BulkShardRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.TimeValue; @@ -174,7 +174,7 @@ public void run() { id = idGenerator.incrementAndGet(); if (useAutoGeneratedIDs) { try { - IndexResponse indexResponse = client.prepareIndex(index) + DocWriteResponse indexResponse = client.prepareIndex(index) .setTimeout(timeout) .setSource(generateSource(id, threadRandom)) .get(); @@ -187,7 +187,7 @@ public void run() { } } else { try { - IndexResponse indexResponse = client.prepareIndex(index) + DocWriteResponse indexResponse = client.prepareIndex(index) .setId(Long.toString(id)) .setTimeout(timeout) .setSource(generateSource(id, threadRandom)) diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 7711164eebf75..4956985c78a97 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -47,7 +47,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -1393,7 +1392,7 @@ protected void ensureFullyConnectedCluster() { * client().prepareIndex(index).setSource(source).execute().actionGet(); * */ - protected final IndexResponse index(String index, XContentBuilder source) { + protected final DocWriteResponse index(String index, XContentBuilder source) { return client().prepareIndex(index).setSource(source).execute().actionGet(); } @@ -1403,11 +1402,11 @@ protected final IndexResponse index(String index, XContentBuilder source) { * client().prepareIndex(index).setSource(source).execute().actionGet(); * */ - protected final IndexResponse index(String index, String id, Map source) { + protected final DocWriteResponse index(String index, String id, Map source) { return client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); } - protected final ActionFuture startIndex(String index, String id, BytesReference source, XContentType type) { + protected final ActionFuture startIndex(String index, String id, BytesReference source, XContentType type) { return client().prepareIndex(index).setId(id).setSource(source, type).execute(); } @@ -1417,7 +1416,7 @@ protected final ActionFuture startIndex(String index, String id, * return 
client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); * */ - protected final IndexResponse index(String index, String id, XContentBuilder source) { + protected final DocWriteResponse index(String index, String id, XContentBuilder source) { return client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); } @@ -1427,7 +1426,7 @@ protected final IndexResponse index(String index, String id, XContentBuilder sou * return client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); * */ - protected final IndexResponse indexDoc(String index, String id, Object... source) { + protected final DocWriteResponse indexDoc(String index, String id, Object... source) { return client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); } @@ -1439,7 +1438,7 @@ protected final IndexResponse indexDoc(String index, String id, Object... source *

* where source is a JSON String. */ - protected final IndexResponse index(String index, String id, String source) { + protected final DocWriteResponse index(String index, String id, String source) { return client().prepareIndex(index).setId(id).setSource(source, XContentType.JSON).execute().actionGet(); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java index 2e76da1a519bd..88e039b6013e4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java @@ -88,7 +88,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { double previousKey = Double.NEGATIVE_INFINITY; while (sketch.next()) { final double value = sketch.value(); - final int count = sketch.count(); + final long count = sketch.count(); double key = Math.floor((value - offset) / interval); assert key >= previousKey; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregator.java index b2be65a9e2901..2db972115767e 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregator.java @@ -134,7 +134,7 @@ public void collect(int doc, long bucket) throws IOException { previousValue = value; // Collecting the bucket 
automatically increments the count by the docCountProvider, // account for that here - final int count = sketch.count() - docCountProvider.getDocCount(doc); + final long count = sketch.count() - docCountProvider.getDocCount(doc); lo = HistoBackedRangeAggregator.this.collect(sub, doc, value, bucket, lo, count); } } @@ -142,7 +142,7 @@ public void collect(int doc, long bucket) throws IOException { }; } - abstract int collect(LeafBucketCollector sub, int doc, double value, long owningBucketOrdinal, int lowBound, int count) + abstract int collect(LeafBucketCollector sub, int doc, double value, long owningBucketOrdinal, int lowBound, long count) throws IOException; private static class NoOverlap extends HistoBackedRangeAggregator { @@ -178,7 +178,7 @@ private NoOverlap( } @Override - public int collect(LeafBucketCollector sub, int doc, double value, long owningBucketOrdinal, int lowBound, int count) + public int collect(LeafBucketCollector sub, int doc, double value, long owningBucketOrdinal, int lowBound, long count) throws IOException { int lo = lowBound, hi = ranges.length - 1; while (lo <= hi) { @@ -240,7 +240,7 @@ private static class Overlap extends HistoBackedRangeAggregator { } @Override - public int collect(LeafBucketCollector sub, int doc, double value, long owningBucketOrdinal, int lowBound, int count) + public int collect(LeafBucketCollector sub, int doc, double value, long owningBucketOrdinal, int lowBound, long count) throws IOException { int lo = lowBound, hi = ranges.length - 1; // all candidates are between these indexes int mid = (lo + hi) >>> 1; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index 08fad5dd3b83c..a06eb509d2539 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.io.stream.ByteArrayStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -295,7 +296,7 @@ public void parse(DocumentParserContext context) throws IOException { return; } ArrayList values = null; - ArrayList counts = null; + ArrayList counts = null; // should be an object ensureExpectedToken(XContentParser.Token.START_OBJECT, token, context.parser()); subParser = new XContentSubParser(context.parser()); @@ -343,7 +344,7 @@ public void parse(DocumentParserContext context) throws IOException { while (token != XContentParser.Token.END_ARRAY) { // should be a number ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, subParser); - counts.add(subParser.intValue()); + counts.add(subParser.longValue()); token = subParser.nextToken(); } } else { @@ -385,7 +386,7 @@ public void parse(DocumentParserContext context) throws IOException { } BytesStreamOutput streamOutput = new BytesStreamOutput(); for (int i = 0; i < values.size(); i++) { - int count = counts.get(i); + long count = counts.get(i); if (count < 0) { throw new DocumentParsingException( subParser.getTokenLocation(), @@ -393,7 +394,11 @@ public void parse(DocumentParserContext context) throws IOException { ); } else if (count > 0) { // we do not add elements with count == 0 - streamOutput.writeVInt(count); + if (streamOutput.getTransportVersion().onOrAfter(TransportVersions.LONG_COUNT_IN_HISTOGRAM_ADDED)) { + streamOutput.writeVLong(count); + } else { + streamOutput.writeVInt(Math.toIntExact(count)); + } streamOutput.writeLong(Double.doubleToRawLongBits(values.get(i))); } } @@ -431,7 +436,7 @@ public void 
parse(DocumentParserContext context) throws IOException { /** re-usable {@link HistogramValue} implementation */ private static class InternalHistogramValue extends HistogramValue { double value; - int count; + long count; boolean isExhausted; final ByteArrayStreamInput streamInput; @@ -450,7 +455,11 @@ void reset(BytesRef bytesRef) { @Override public boolean next() throws IOException { if (streamInput.available() > 0) { - count = streamInput.readVInt(); + if (streamInput.getTransportVersion().onOrAfter(TransportVersions.LONG_COUNT_IN_HISTOGRAM_ADDED)) { + count = streamInput.readVLong(); + } else { + count = streamInput.readVInt(); + } value = Double.longBitsToDouble(streamInput.readLong()); return true; } @@ -467,7 +476,7 @@ public double value() { } @Override - public int count() { + public long count() { if (isExhausted) { throw new IllegalArgumentException("histogram already exhausted"); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java index 9be00cacd0e09..2892ada15fec9 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java @@ -281,8 +281,8 @@ public void testCountIsLong() throws Exception { .field("values", new double[] { 2, 2, 3 }) .endObject() ); - Exception e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source)); - assertThat(e.getCause().getMessage(), containsString(" out of range of int")); + ParsedDocument doc = mapper.parse(source); + assertThat(doc.rootDoc().getField("field"), notNullValue()); } public void testValuesNotInOrder() throws Exception { diff --git 
a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java index 3f27cb6b3a458..c5bdc719f9dcb 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java @@ -182,6 +182,7 @@ public void testClusterDetailsAfterSuccessfulCCS() throws Exception { waitForSearchTasksToFinish(); { AsyncSearchResponse finishedResponse = getAsyncSearch(response.getId()); + assertFalse(finishedResponse.isPartial()); SearchResponse.Clusters clusters = finishedResponse.getSearchResponse().getClusters(); assertFalse("search cluster results should NOT be marked as partial", clusters.hasPartialResults()); @@ -216,6 +217,7 @@ public void testClusterDetailsAfterSuccessfulCCS() throws Exception { // check that the async_search/status response includes the same cluster details { AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); + assertFalse(statusResponse.isPartial()); SearchResponse.Clusters clusters = statusResponse.getClusters(); assertFalse("search cluster results should NOT be marked as partial", clusters.hasPartialResults()); @@ -300,6 +302,7 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except { AsyncSearchResponse finishedResponse = getAsyncSearch(response.getId()); assertNotNull(finishedResponse); + assertFalse(finishedResponse.isPartial()); SearchResponse.Clusters clusters = finishedResponse.getSearchResponse().getClusters(); assertFalse("search cluster results should NOT be marked as partial", clusters.hasPartialResults()); @@ -328,6 +331,50 @@ public void testCCSClusterDetailsWhereAllShardsSkippedInCanMatch() throws Except 
assertThat(localClusterSearchInfo.getFailures().size(), equalTo(0)); assertThat(localClusterSearchInfo.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteClusterSearchInfo.getStatus(), equalTo(SearchResponse.Cluster.Status.SUCCESSFUL)); + assertThat(remoteClusterSearchInfo.getTotalShards(), equalTo(remoteNumShards)); + assertThat(remoteClusterSearchInfo.getSuccessfulShards(), equalTo(remoteNumShards)); + if (minimizeRoundtrips) { + assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards - 1)); + } else { + assertThat(remoteClusterSearchInfo.getSkippedShards(), equalTo(remoteNumShards)); + } + assertThat(remoteClusterSearchInfo.getFailedShards(), equalTo(0)); + assertThat(remoteClusterSearchInfo.getFailures().size(), equalTo(0)); + assertThat(remoteClusterSearchInfo.getTook().millis(), greaterThanOrEqualTo(0L)); + } + { + AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); + assertNotNull(statusResponse); + assertFalse(statusResponse.isPartial()); + + SearchResponse.Clusters clusters = statusResponse.getClusters(); + assertFalse("search cluster results should NOT be marked as partial", clusters.hasPartialResults()); + assertThat(clusters.getTotal(), equalTo(2)); + assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), equalTo(2)); + assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED), equalTo(0)); + assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.RUNNING), equalTo(0)); + assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL), equalTo(0)); + assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.FAILED), equalTo(0)); + + SearchResponse.Cluster localClusterSearchInfo = clusters.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY).get(); + assertNotNull(localClusterSearchInfo); + SearchResponse.Cluster remoteClusterSearchInfo = clusters.getCluster(REMOTE_CLUSTER).get(); + 
assertNotNull(remoteClusterSearchInfo); + + assertThat(localClusterSearchInfo.getStatus(), equalTo(SearchResponse.Cluster.Status.SUCCESSFUL)); + assertThat(localClusterSearchInfo.getTotalShards(), equalTo(localNumShards)); + assertThat(localClusterSearchInfo.getSuccessfulShards(), equalTo(localNumShards)); + if (dfs) { + // no skipped shards locally when DFS_QUERY_THEN_FETCH is used + assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(0)); + } else { + assertThat(localClusterSearchInfo.getSkippedShards(), equalTo(localNumShards - 1)); + } + assertThat(localClusterSearchInfo.getFailedShards(), equalTo(0)); + assertThat(localClusterSearchInfo.getFailures().size(), equalTo(0)); + assertThat(localClusterSearchInfo.getTook().millis(), greaterThanOrEqualTo(0L)); + assertThat(remoteClusterSearchInfo.getStatus(), equalTo(SearchResponse.Cluster.Status.SUCCESSFUL)); assertThat(remoteClusterSearchInfo.getTotalShards(), equalTo(remoteNumShards)); assertThat(remoteClusterSearchInfo.getSuccessfulShards(), equalTo(remoteNumShards)); @@ -374,6 +421,7 @@ public void testClusterDetailsAfterCCSWithFailuresOnAllShards() throws Exception { AsyncSearchResponse finishedResponse = getAsyncSearch(response.getId()); + assertTrue(finishedResponse.isPartial()); SearchResponse.Clusters clusters = finishedResponse.getSearchResponse().getClusters(); assertThat(clusters.getTotal(), equalTo(2)); @@ -404,6 +452,8 @@ public void testClusterDetailsAfterCCSWithFailuresOnAllShards() throws Exception // check that the async_search/status response includes the same cluster details { AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); + assertTrue(statusResponse.isPartial()); + SearchResponse.Clusters clusters = statusResponse.getClusters(); assertThat(clusters.getTotal(), equalTo(2)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), equalTo(0)); @@ -481,6 +531,8 @@ public void testClusterDetailsAfterCCSWithFailuresOnOneShardOnly() throws 
Except { AsyncSearchResponse finishedResponse = getAsyncSearch(response.getId()); + assertTrue(finishedResponse.isPartial()); + SearchResponse.Clusters clusters = finishedResponse.getSearchResponse().getClusters(); assertThat(clusters.getTotal(), equalTo(2)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), equalTo(0)); @@ -516,6 +568,8 @@ public void testClusterDetailsAfterCCSWithFailuresOnOneShardOnly() throws Except // check that the async_search/status response includes the same cluster details { AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); + assertTrue(statusResponse.isPartial()); + SearchResponse.Clusters clusters = statusResponse.getClusters(); assertThat(clusters.getTotal(), equalTo(2)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), equalTo(0)); @@ -606,6 +660,7 @@ public void testClusterDetailsAfterCCSWithFailuresOnOneClusterOnly() throws Exce { AsyncSearchResponse finishedResponse = getAsyncSearch(response.getId()); + assertTrue(finishedResponse.isPartial()); SearchResponse.Clusters clusters = finishedResponse.getSearchResponse().getClusters(); assertThat(clusters.getTotal(), equalTo(2)); @@ -657,6 +712,8 @@ public void testClusterDetailsAfterCCSWithFailuresOnOneClusterOnly() throws Exce // check that the async_search/status response includes the same cluster details { AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); + assertTrue(statusResponse.isPartial()); + SearchResponse.Clusters clusters = statusResponse.getClusters(); assertThat(clusters.getTotal(), equalTo(2)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), equalTo(1)); @@ -736,6 +793,7 @@ public void testCCSWithSearchTimeout() throws Exception { { AsyncSearchResponse finishedResponse = getAsyncSearch(response.getId()); assertTrue(finishedResponse.getSearchResponse().isTimedOut()); + assertTrue(finishedResponse.isPartial()); 
SearchResponse.Clusters clusters = finishedResponse.getSearchResponse().getClusters(); assertThat(clusters.getTotal(), equalTo(2)); @@ -772,6 +830,7 @@ public void testCCSWithSearchTimeout() throws Exception { // check that the async_search/status response includes the same cluster details { AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); + assertTrue(statusResponse.isPartial()); SearchResponse.Clusters clusters = statusResponse.getClusters(); assertThat(clusters.getTotal(), equalTo(2)); @@ -837,6 +896,7 @@ public void testRemoteClusterOnlyCCSSuccessfulResult() throws Exception { { AsyncSearchResponse finishedResponse = getAsyncSearch(response.getId()); + assertFalse(finishedResponse.isPartial()); SearchResponse.Clusters clusters = finishedResponse.getSearchResponse().getClusters(); assertFalse("search cluster results should NOT be marked as partial", clusters.hasPartialResults()); @@ -863,6 +923,7 @@ public void testRemoteClusterOnlyCCSSuccessfulResult() throws Exception { // check that the async_search/status response includes the same cluster details { AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); + assertFalse(statusResponse.isPartial()); SearchResponse.Clusters clusters = statusResponse.getClusters(); assertFalse("search cluster results should NOT be marked as partial", clusters.hasPartialResults()); @@ -918,6 +979,7 @@ public void testRemoteClusterOnlyCCSWithFailuresOnOneShardOnly() throws Exceptio { AsyncSearchResponse finishedResponse = getAsyncSearch(response.getId()); + assertTrue(finishedResponse.isPartial()); SearchResponse.Clusters clusters = finishedResponse.getSearchResponse().getClusters(); assertThat(clusters.getTotal(), equalTo(1)); @@ -944,6 +1006,8 @@ public void testRemoteClusterOnlyCCSWithFailuresOnOneShardOnly() throws Exceptio // check that the async_search/status response includes the same cluster details { AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); + 
assertTrue(statusResponse.isPartial()); + SearchResponse.Clusters clusters = statusResponse.getClusters(); assertThat(clusters.getTotal(), equalTo(1)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), equalTo(0)); @@ -1002,6 +1066,7 @@ public void testRemoteClusterOnlyCCSWithFailuresOnAllShards() throws Exception { waitForSearchTasksToFinish(); { AsyncSearchResponse finishedResponse = getAsyncSearch(response.getId()); + assertTrue(finishedResponse.isPartial()); SearchResponse.Clusters clusters = finishedResponse.getSearchResponse().getClusters(); assertThat(clusters.getTotal(), equalTo(1)); @@ -1029,6 +1094,8 @@ public void testRemoteClusterOnlyCCSWithFailuresOnAllShards() throws Exception { // check that the async_search/status response includes the same cluster details { AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); + assertTrue(statusResponse.isPartial()); + SearchResponse.Clusters clusters = statusResponse.getClusters(); assertThat(clusters.getTotal(), equalTo(1)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL), equalTo(0)); @@ -1179,7 +1246,6 @@ public void testCancelViaTasksAPI() throws Exception { assertThat(json, matchesRegex(".*task (was)?\s*cancelled.*")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99519") public void testCancelViaAsyncSearchDelete() throws Exception { Map testClusterInfo = setupTwoClusters(); String localIndex = (String) testClusterInfo.get("local.index"); @@ -1363,6 +1429,8 @@ public void testCancellationViaTimeoutWithAllowPartialResultsSetToFalse() throws AsyncStatusResponse statusResponse = getAsyncStatus(response.getId()); assertFalse(statusResponse.isRunning()); + assertTrue(statusResponse.isPartial()); + assertEquals(0, statusResponse.getSuccessfulShards()); assertEquals(0, statusResponse.getSkippedShards()); assertThat(statusResponse.getFailedShards(), greaterThanOrEqualTo(1)); diff --git 
a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestGetAsyncStatusAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestGetAsyncStatusAction.java index eef791c1bf9d8..31a1af22ef8db 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestGetAsyncStatusAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestGetAsyncStatusAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.async.GetAsyncStatusRequest; import org.elasticsearch.xpack.core.search.action.GetAsyncStatusAction; @@ -34,6 +34,6 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { GetAsyncStatusRequest statusRequest = new GetAsyncStatusRequest(request.param("id")); - return channel -> client.execute(GetAsyncStatusAction.INSTANCE, statusRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute(GetAsyncStatusAction.INSTANCE, statusRequest, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java index 60f477542dae5..0c3c339adb8bd 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.search; import org.elasticsearch.action.ActionListener; -import 
org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; @@ -102,7 +102,7 @@ public void onResponse(AsyncSearchResponse searchResponse) { AsyncSearchResponse initialResp = searchResponse.clone(searchResponse.getId()); store.createResponse(docId, searchTask.getOriginHeaders(), initialResp, new ActionListener<>() { @Override - public void onResponse(IndexResponse r) { + public void onResponse(DocWriteResponse r) { if (searchResponse.isRunning()) { try { // store the final response on completion unless the submit is cancelled diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java index e2157345e025f..dae6e1e980eb4 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java @@ -9,9 +9,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; @@ -44,7 +44,7 @@ protected Collection> getPlugins() { public void testFetchFailuresAllShards() throws Exception { for (int i = 0; i < 10; i++) { - IndexResponse indexResponse = client().index(new IndexRequest("boom" + i).id("boom" + 
i).source("text", "value")).get(); + DocWriteResponse indexResponse = client().index(new IndexRequest("boom" + i).id("boom" + i).source("text", "value")).get(); assertEquals(RestStatus.CREATED, indexResponse.status()); } client().admin().indices().refresh(new RefreshRequest()).get(); @@ -83,11 +83,11 @@ public void testFetchFailuresAllShards() throws Exception { public void testFetchFailuresOnlySomeShards() throws Exception { for (int i = 0; i < 5; i++) { - IndexResponse indexResponse = client().index(new IndexRequest("boom" + i).id("boom" + i).source("text", "value")).get(); + DocWriteResponse indexResponse = client().index(new IndexRequest("boom" + i).id("boom" + i).source("text", "value")).get(); assertEquals(RestStatus.CREATED, indexResponse.status()); } for (int i = 0; i < 5; i++) { - IndexResponse indexResponse = client().index(new IndexRequest("index" + i).id("index" + i).source("text", "value")).get(); + DocWriteResponse indexResponse = client().index(new IndexRequest("index" + i).id("index" + i).source("text", "value")).get(); assertEquals(RestStatus.CREATED, indexResponse.status()); } client().admin().indices().refresh(new RefreshRequest()).get(); diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java index ad66e34cd371e..845997872ed8d 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -75,7 +74,7 @@ public 
void testFailOverOnFollower() throws Exception { } if (frequently()) { String id = Integer.toString(frequently() ? docID.incrementAndGet() : between(0, 10)); // sometimes update - IndexResponse indexResponse = leaderClient().prepareIndex(leaderIndex) + DocWriteResponse indexResponse = leaderClient().prepareIndex(leaderIndex) .setId(id) .setSource("{\"f\":" + id + "}", XContentType.JSON) .get(); @@ -142,7 +141,7 @@ public void testFollowIndexAndCloseNode() throws Exception { } Object[] args = new Object[] { counter++ }; final String source = Strings.format("{\"f\":%d}", args); - IndexResponse indexResp = leaderClient().prepareIndex("index1") + DocWriteResponse indexResp = leaderClient().prepareIndex("index1") .setSource(source, XContentType.JSON) .setTimeout(TimeValue.timeValueSeconds(1)) .get(); @@ -291,7 +290,7 @@ public void testReadRequestsReturnLatestMappingVersion() throws Exception { assertNotNull(mapper); assertNotNull(mapper.mappers().getMapper("balance")); }); - IndexResponse indexResp = leaderCluster.client() + DocWriteResponse indexResp = leaderCluster.client() .prepareIndex("leader-index") .setId("1") .setSource("{\"balance\": 100}", XContentType.JSON) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java index a84b9cb2f52f2..2b33e18d83bfb 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import 
org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -23,7 +24,7 @@ public class DeleteInternalCcrRepositoryAction extends ActionType ActionResponse.Empty.INSTANCE); + super(NAME, Writeable.Reader.localOnly()); } public static class TransportDeleteInternalRepositoryAction extends TransportAction< diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryRequest.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryRequest.java index 23cf2b64d4118..bc7ca12a49e22 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryRequest.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryRequest.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -29,7 +30,7 @@ public ActionRequestValidationException validate() { @Override public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("DeleteInternalRepositoryRequest cannot be serialized for sending across the wire."); + TransportAction.localOnly(); } public String getName() { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java index d8e323583c4de..68c3ff97e26fe 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -23,7 +24,7 @@ public class PutInternalCcrRepositoryAction extends ActionType ActionResponse.Empty.INSTANCE); + super(NAME, Writeable.Reader.localOnly()); } public static class TransportPutInternalRepositoryAction extends TransportAction< diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryRequest.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryRequest.java index 152555073ef00..6d92062035d76 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryRequest.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryRequest.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -31,7 +32,7 @@ public ActionRequestValidationException validate() { @Override public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("PutInternalRepositoryRequest cannot be serialized for sending across the wire."); + TransportAction.localOnly(); } public String getName() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicResponse.java index cc22d7394babd..9ef14ac72528c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicResponse.java @@ -10,10 +10,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.protocol.xpack.common.ProtocolUtils; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -21,7 +21,7 @@ import java.util.Map; import java.util.Objects; -public class PostStartBasicResponse extends AcknowledgedResponse implements StatusToXContentObject { +public class PostStartBasicResponse extends AcknowledgedResponse implements ToXContentObject { private static final ParseField BASIC_WAS_STARTED_FIELD = new ParseField("basic_was_started"); private static final ParseField ERROR_MESSAGE_FIELD = new ParseField("error_message"); @@ -116,7 +116,6 @@ protected void addCustomFields(XContentBuilder builder, Params params) throws IO } } - @Override public RestStatus status() { return status.restStatus; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java index 815ea0ffa3914..38a7ea7f7da29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java @@ -11,7 +11,7 @@ import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; 
import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; import java.util.List; @@ -35,7 +35,11 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli startBasicRequest.acknowledge(request.paramAsBoolean("acknowledge", false)); startBasicRequest.timeout(request.paramAsTime("timeout", startBasicRequest.timeout())); startBasicRequest.masterNodeTimeout(request.paramAsTime("master_timeout", startBasicRequest.masterNodeTimeout())); - return channel -> client.execute(PostStartBasicAction.INSTANCE, startBasicRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute( + PostStartBasicAction.INSTANCE, + startBasicRequest, + new RestToXContentListener<>(channel, PostStartBasicResponse::status) + ); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractGetResourcesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractGetResourcesResponse.java index 70000cb64d2a0..3324b37df01e4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractGetResourcesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractGetResourcesResponse.java @@ -11,18 +11,15 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.action.util.QueryPage; import java.io.IOException; import java.util.Objects; -public abstract class 
AbstractGetResourcesResponse extends ActionResponse - implements - StatusToXContentObject { +public abstract class AbstractGetResourcesResponse extends ActionResponse implements ToXContentObject { private QueryPage resources; @@ -47,11 +44,6 @@ public void writeTo(StreamOutput out) throws IOException { resources.writeTo(out); } - @Override - public RestStatus status() { - return RestStatus.OK; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java index e1fa08f3c9bea..6f6cc6c259e34 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java @@ -15,11 +15,11 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; @@ -214,7 +214,7 @@ public SecurityContext getSecurityContext() { * Currently for EQL we don't set limit for a stored async response * TODO: add limit for stored async response in EQL, and instead of this method use createResponse */ - public void createResponseForEQL(String docId, Map headers, R response, ActionListener listener) { + public void createResponseForEQL(String docId, Map headers, R 
response, ActionListener listener) { try { final ReleasableBytesStreamOutput buffer = new ReleasableBytesStreamOutput(0, bigArrays.withCircuitBreaking()); final XContentBuilder source = XContentFactory.jsonBuilder(buffer); @@ -239,7 +239,7 @@ public void createResponseForEQL(String docId, Map headers, R re * Stores the initial response with the original headers of the authenticated user * and the expected expiration time. */ - public void createResponse(String docId, Map headers, R response, ActionListener listener) + public void createResponse(String docId, Map headers, R response, ActionListener listener) throws IOException { try { final ReleasableBytesStreamOutput buffer = new ReleasableBytesStreamOutputWithLimit( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java index fe24c6f0d5cb2..f4c3704cd65c1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java @@ -10,10 +10,10 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; @@ -116,7 +116,7 @@ public void error(String resourceId, String message) { indexDoc(messageFactory.newMessage(resourceId, message, 
Level.ERROR, new Date(), nodeName)); } - private static void onIndexResponse(IndexResponse response) { + private static void onIndexResponse(DocWriteResponse response) { logger.trace("Successfully wrote audit message"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java index 217ab505f7ebf..09e59096283c9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -138,7 +137,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - public static class Response extends AbstractGetResourcesResponse implements StatusToXContentObject { + public static class Response extends AbstractGetResourcesResponse implements ToXContentObject { public Response(QueryPage calendars) { super(calendars); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java index e213703b56892..f37264cc6bc9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java @@ -8,7 +8,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.common.io.stream.StreamInput; -import 
org.elasticsearch.common.xcontent.StatusToXContentObject; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xpack.core.action.AbstractGetResourcesRequest; import org.elasticsearch.xpack.core.action.AbstractGetResourcesResponse; import org.elasticsearch.xpack.core.action.util.QueryPage; @@ -55,7 +55,7 @@ public String getResourceIdField() { } } - public static class Response extends AbstractGetResourcesResponse implements StatusToXContentObject { + public static class Response extends AbstractGetResourcesResponse implements ToXContentObject { public Response(QueryPage filters) { super(filters); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java index f245b6a9754b4..c7b2bcee8a69f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java @@ -12,10 +12,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; @@ -33,7 +32,7 @@ private PostDataAction() { super(NAME, PostDataAction.Response::new); } - public static class Response extends BaseTasksResponse implements StatusToXContentObject, Writeable { + public static class Response extends BaseTasksResponse implements ToXContentObject, Writeable { private final DataCounts 
dataCounts; @@ -62,11 +61,6 @@ public DataCounts getDataCounts() { return dataCounts; } - @Override - public RestStatus status() { - return RestStatus.ACCEPTED; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java index f0801a9ab1a8c..b8bff389061b1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java @@ -13,8 +13,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -151,10 +149,10 @@ public boolean equals(Object obj) { } } - public static class Response extends ActionResponse implements StatusToXContentObject { + public static class Response extends ActionResponse implements ToXContentObject { private static final ParseField MODEL = new ParseField("model"); - private ModelSnapshot model; + private final ModelSnapshot model; public Response(StreamInput in) throws IOException { super(in); @@ -174,11 +172,6 @@ public void writeTo(StreamOutput out) throws IOException { model.writeTo(out); } - @Override - public RestStatus status() { - return RestStatus.OK; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java index b253e4144ba2b..c83b7d7578bf1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java @@ -13,8 +13,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -153,7 +151,7 @@ public boolean equals(Object obj) { } } - public static class Response extends ActionResponse implements StatusToXContentObject { + public static class Response extends ActionResponse implements ToXContentObject { private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); private static final ParseField MODEL = new ParseField("model"); @@ -178,11 +176,6 @@ public void writeTo(StreamOutput out) throws IOException { model.writeTo(out); } - @Override - public RestStatus status() { - return RestStatus.OK; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java index 1c0f79a686390..9641f9afbb00c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java @@ -11,8 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; @@ -32,7 +31,7 @@ private UpdateProcessAction() { super(NAME, UpdateProcessAction.Response::new); } - public static class Response extends BaseTasksResponse implements StatusToXContentObject, Writeable { + public static class Response extends BaseTasksResponse implements ToXContentObject, Writeable { private final boolean isUpdated; @@ -56,11 +55,6 @@ public boolean isUpdated() { return isUpdated; } - @Override - public RestStatus status() { - return RestStatus.ACCEPTED; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -90,11 +84,11 @@ public boolean equals(Object obj) { public static class Request extends JobTaskRequest { - private ModelPlotConfig modelPlotConfig; - private PerPartitionCategorizationConfig perPartitionCategorizationConfig; + private final ModelPlotConfig modelPlotConfig; + private final PerPartitionCategorizationConfig perPartitionCategorizationConfig; private List detectorUpdates; - private MlFilter filter; - private boolean updateScheduledEvents = false; + private final MlFilter filter; + private final boolean updateScheduledEvents; public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java index 9d4442f877b85..9dfa2d51f0fc0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java @@ -103,6 +103,7 @@ public class TrainedModelConfig implements ToXContentObject, Writeable { public static final ParseField PER_DEPLOYMENT_MEMORY_BYTES = new ParseField("per_deployment_memory_bytes"); public static final ParseField PER_ALLOCATION_MEMORY_BYTES = new ParseField("per_allocation_memory_bytes"); + public static final ParseField PLATFORM_ARCHITECTURE = new ParseField("platform_architecture"); public static final TransportVersion VERSION_3RD_PARTY_CONFIG_ADDED = TransportVersions.V_8_0_0; public static final TransportVersion VERSION_ALLOCATION_MEMORY_ADDED = TransportVersions.V_8_500_064; @@ -168,6 +169,7 @@ private static ObjectParser createParser(boole (p, c) -> ignoreUnknownFields ? 
ModelPackageConfig.fromXContentLenient(p) : ModelPackageConfig.fromXContentStrict(p), MODEL_PACKAGE ); + parser.declareString(TrainedModelConfig.Builder::setPlatformArchitecture, PLATFORM_ARCHITECTURE); return parser; } @@ -195,6 +197,7 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo private final TrainedModelLocation location; private final ModelPackageConfig modelPackageConfig; private Boolean fullDefinition; + private String platformArchitecture; TrainedModelConfig( String modelId, @@ -213,7 +216,8 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo Map defaultFieldMap, InferenceConfig inferenceConfig, TrainedModelLocation location, - ModelPackageConfig modelPackageConfig + ModelPackageConfig modelPackageConfig, + String platformArchitecture ) { this.modelId = ExceptionsHelper.requireNonNull(modelId, MODEL_ID); this.modelType = modelType; @@ -240,6 +244,7 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo this.inferenceConfig = inferenceConfig; this.location = location; this.modelPackageConfig = modelPackageConfig; + this.platformArchitecture = platformArchitecture; } private static TrainedModelInput handleDefaultInput(TrainedModelInput input, TrainedModelType modelType) { @@ -279,6 +284,11 @@ public TrainedModelConfig(StreamInput in) throws IOException { modelPackageConfig = null; fullDefinition = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { + platformArchitecture = in.readOptionalString(); + } else { + platformArchitecture = null; + } } public boolean isPackagedModel() { @@ -421,6 +431,10 @@ public long getPerAllocationMemoryBytes() { : 0L; } + public String getPlatformArchitecture() { + return platformArchitecture; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(modelId); @@ -451,6 +465,10 @@ public void writeTo(StreamOutput out) throws IOException { 
out.writeOptionalWriteable(modelPackageConfig); out.writeOptionalBoolean(fullDefinition); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { + out.writeOptionalString(platformArchitecture); + } } @Override @@ -463,6 +481,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (modelPackageConfig != null) { builder.field(MODEL_PACKAGE.getPreferredName(), modelPackageConfig); } + if (platformArchitecture != null) { + builder.field(PLATFORM_ARCHITECTURE.getPreferredName(), platformArchitecture); + } // If the model is to be exported for future import to another cluster, these fields are irrelevant. if (params.paramAsBoolean(EXCLUDE_GENERATED, false) == false) { @@ -543,7 +564,8 @@ public boolean equals(Object o) { && Objects.equals(defaultFieldMap, that.defaultFieldMap) && Objects.equals(inferenceConfig, that.inferenceConfig) && Objects.equals(metadata, that.metadata) - && Objects.equals(location, that.location); + && Objects.equals(location, that.location) + && Objects.equals(platformArchitecture, that.platformArchitecture); } @Override @@ -565,7 +587,8 @@ public int hashCode() { licenseLevel, inferenceConfig, defaultFieldMap, - location + location, + platformArchitecture ); } @@ -590,6 +613,7 @@ public static class Builder { private ModelPackageConfig modelPackageConfig; private Long perDeploymentMemoryBytes; private Long perAllocationMemoryBytes; + private String platformArchitecture; public Builder() {} @@ -611,6 +635,7 @@ public Builder(TrainedModelConfig config) { this.inferenceConfig = config.inferenceConfig; this.location = config.location; this.modelPackageConfig = config.modelPackageConfig; + this.platformArchitecture = config.platformArchitecture; } public Builder setModelId(String modelId) { @@ -703,6 +728,11 @@ public Builder setHyperparameters(List hyperparameters) { return addToMetadata(HYPERPARAMETERS, 
hyperparameters.stream().map(Hyperparameters::asMap).collect(Collectors.toList())); } + public Builder setPlatformArchitecture(String platformArchitecture) { + this.platformArchitecture = platformArchitecture; + return this; + } + public Builder setModelAliases(Set modelAliases) { if (modelAliases == null || modelAliases.isEmpty()) { return this; @@ -1022,7 +1052,8 @@ public TrainedModelConfig build() { defaultFieldMap, inferenceConfig, location, - modelPackageConfig + modelPackageConfig, + platformArchitecture ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java index ca70f9e9e761d..6c8fc6fec4e0e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java @@ -42,11 +42,15 @@ public final class InferenceIndexConstants { public static final ParseField DOC_TYPE = new ParseField("doc_type"); private static final String NATIVE_INDEX_PREFIX = INDEX_NAME_PREFIX + "native-"; - private static final String NATIVE_INDEX_VERSION = "000001"; + + // 000002 added support for platform specific models + private static final String NATIVE_INDEX_VERSION = "000002"; private static final String NATIVE_LATEST_INDEX = NATIVE_INDEX_PREFIX + NATIVE_INDEX_VERSION; private static final String MAPPINGS_VERSION_VARIABLE = "xpack.ml.version"; - public static final int INFERENCE_INDEX_MAPPINGS_VERSION = 1; + + // 2 added support for platform specific models + public static final int INFERENCE_INDEX_MAPPINGS_VERSION = 2; public static String mapping() { return TemplateUtils.loadTemplate( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java index 7399a8e9c9e31..9a5f59b84b347 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java @@ -12,21 +12,19 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.core.Nullable; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestActions; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.rest.RestStatus.OK; - /** * A response of an async search status request. 
*/ -public class AsyncStatusResponse extends ActionResponse implements SearchStatusResponse, StatusToXContentObject { +public class AsyncStatusResponse extends ActionResponse implements SearchStatusResponse, ToXContentObject { private final String id; private final boolean isRunning; private final boolean isPartial; @@ -171,11 +169,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - @Override - public RestStatus status() { - return OK; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java index 2751a6f60d3e6..6d693b8ad8fdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java @@ -46,7 +46,7 @@ public IndicesAccessControl(boolean granted, Map ind public IndicesAccessControl(boolean granted, Supplier> indexPermissionsSupplier) { this.granted = granted; - this.indexPermissionsSupplier = new CachedSupplier<>(Objects.requireNonNull(indexPermissionsSupplier)); + this.indexPermissionsSupplier = CachedSupplier.wrap(Objects.requireNonNull(indexPermissionsSupplier)); } protected IndicesAccessControl(IndicesAccessControl copy) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 18c72ea8d42cb..f6046cd41f25c 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -174,6 +174,23 @@ static RoleDescriptor kibanaSystem(String name) { .allowRestrictedIndices(true) .build(), RoleDescriptor.IndicesPrivileges.builder().indices(".fleet-fileds*").privileges("all").allowRestrictedIndices(true).build(), + // 8.9 BWC + RoleDescriptor.IndicesPrivileges.builder() + .indices(".fleet-file-data-*") + .privileges("all") + .allowRestrictedIndices(true) + .build(), + RoleDescriptor.IndicesPrivileges.builder().indices(".fleet-files-*").privileges("all").allowRestrictedIndices(true).build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices(".fleet-filedelivery-data-*") + .privileges("all") + .allowRestrictedIndices(true) + .build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices(".fleet-filedelivery-meta-*") + .privileges("all") + .allowRestrictedIndices(true) + .build(), // Fleet telemetry queries Agent Logs indices in kibana task runner RoleDescriptor.IndicesPrivileges.builder().indices("logs-elastic_agent*").privileges("read").build(), // Legacy "Alerts as data" used in Security Solution. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfiguration.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfiguration.java index 54c153c04ea31..b5ec0c138607a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfiguration.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfiguration.java @@ -15,7 +15,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -26,11 +27,10 @@ import java.io.IOException; import java.util.Comparator; import java.util.EnumSet; -import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.function.LongSupplier; -import java.util.function.Predicate; import static org.elasticsearch.core.Strings.format; @@ -112,169 +112,166 @@ public static SnapshotRetentionConfiguration parse(XContentParser parser, String return PARSER.apply(parser, null); } - public TimeValue getExpireAfter() { - return this.expireAfter; - } - - public Integer getMinimumSnapshotCount() { - return this.minimumSnapshotCount; - } - - public Integer getMaximumSnapshotCount() { - return this.maximumSnapshotCount; - } - /** - * Return a predicate by which a SnapshotInfo can be tested to see - * whether it should be deleted according to this retention policy. - * @param allSnapshots a list of all snapshot pertaining to this SLM policy and repository + * @return whether a snapshot should be deleted according to this retention policy. 
+ * @param allSnapshots all the snapshot details pertaining to this SLM policy and repository */ - public Predicate getSnapshotDeletionPredicate(final List allSnapshots) { + public boolean isSnapshotEligibleForDeletion( + SnapshotId snapshotId, + RepositoryData.SnapshotDetails snapshotDetails, + Map allSnapshots + ) { + assert Strings.hasText(snapshotDetails.getSlmPolicy()); + final var snapshotState = snapshotDetails.getSnapshotState(); + final var startTimeMillis = snapshotDetails.getStartTimeMillis(); + final var snapshotName = snapshotId.getName(); + final int totalSnapshotCount = allSnapshots.size(); - final List sortedSnapshots = allSnapshots.stream().sorted(Comparator.comparingLong(SnapshotInfo::startTime)).toList(); + final var sortedSnapshots = allSnapshots.entrySet() + .stream() + .sorted(Comparator.comparingLong(e -> e.getValue().getStartTimeMillis())) + .toList(); int successCount = 0; long latestSuccessfulTimestamp = Long.MIN_VALUE; - for (SnapshotInfo snapshot : allSnapshots) { - if (snapshot.state() == SnapshotState.SUCCESS) { + for (final var snapshot : allSnapshots.values()) { + assert Objects.equals(snapshot.getSlmPolicy(), snapshotDetails.getSlmPolicy()); + if (snapshot.getSnapshotState() == SnapshotState.SUCCESS) { successCount++; - latestSuccessfulTimestamp = Math.max(latestSuccessfulTimestamp, snapshot.startTime()); + latestSuccessfulTimestamp = Math.max(latestSuccessfulTimestamp, snapshot.getStartTimeMillis()); } } final long newestSuccessfulTimestamp = latestSuccessfulTimestamp; final int successfulSnapshotCount = successCount; - return si -> { - final String snapName = si.snapshotId().getName(); - // First, if there's no expire_after and a more recent successful snapshot, we can delete all the failed ones - if (this.expireAfter == null && UNSUCCESSFUL_STATES.contains(si.state()) && newestSuccessfulTimestamp > si.startTime()) { - // There's no expire_after and there's a more recent successful snapshot, delete this failed one - 
logger.trace("[{}]: ELIGIBLE as it is {} and there is a more recent successful snapshot", snapName, si.state()); - return true; - } + // First, if there's no expire_after and a more recent successful snapshot, we can delete all the failed ones + if (this.expireAfter == null && UNSUCCESSFUL_STATES.contains(snapshotState) && newestSuccessfulTimestamp > startTimeMillis) { + // There's no expire_after and there's a more recent successful snapshot, delete this failed one + logger.trace("[{}]: ELIGIBLE as it is {} and there is a more recent successful snapshot", snapshotName, snapshotState); + return true; + } - // Next, enforce the maximum count, if the size is over the maximum number of - // snapshots, then allow the oldest N (where N is the number over the maximum snapshot - // count) snapshots to be eligible for deletion - if (this.maximumSnapshotCount != null && successfulSnapshotCount > this.maximumSnapshotCount) { - final long successfulSnapsToDelete = successfulSnapshotCount - this.maximumSnapshotCount; - boolean found = false; - int successfulSeen = 0; - for (SnapshotInfo s : sortedSnapshots) { - if (s.state() == SnapshotState.SUCCESS) { - successfulSeen++; - } - if (successfulSeen > successfulSnapsToDelete) { - break; - } - if (s.equals(si)) { - found = true; - break; - } + // Next, enforce the maximum count, if the size is over the maximum number of + // snapshots, then allow the oldest N (where N is the number over the maximum snapshot + // count) snapshots to be eligible for deletion + if (this.maximumSnapshotCount != null && successfulSnapshotCount > this.maximumSnapshotCount) { + final long successfulSnapsToDelete = successfulSnapshotCount - this.maximumSnapshotCount; + boolean found = false; + int successfulSeen = 0; + for (final var s : sortedSnapshots) { + if (s.getValue().getSnapshotState() == SnapshotState.SUCCESS) { + successfulSeen++; } - if (found) { - logger.trace( - "[{}]: ELIGIBLE as it is one of the {} oldest snapshots with " - + "{} non-failed 
snapshots ({} total), over the limit of {} maximum snapshots", - snapName, - successfulSnapsToDelete, - successfulSnapshotCount, - totalSnapshotCount, - this.maximumSnapshotCount - ); - return true; - } else { - logger.trace( - "[{}]: SKIPPING as it is not one of the {} oldest snapshots with " - + "{} non-failed snapshots ({} total), over the limit of {} maximum snapshots", - snapName, - successfulSnapsToDelete, - successfulSnapshotCount, - totalSnapshotCount, - this.maximumSnapshotCount - ); + if (successfulSeen > successfulSnapsToDelete) { + break; + } + if (s.getKey().equals(snapshotId)) { + found = true; + break; } } + if (found) { + logger.trace( + "[{}]: ELIGIBLE as it is one of the {} oldest snapshots with " + + "{} non-failed snapshots ({} total), over the limit of {} maximum snapshots", + snapshotName, + successfulSnapsToDelete, + successfulSnapshotCount, + totalSnapshotCount, + this.maximumSnapshotCount + ); + return true; + } else { + logger.trace( + "[{}]: SKIPPING as it is not one of the {} oldest snapshots with " + + "{} non-failed snapshots ({} total), over the limit of {} maximum snapshots", + snapshotName, + successfulSnapsToDelete, + successfulSnapshotCount, + totalSnapshotCount, + this.maximumSnapshotCount + ); + } + } - // Next check the minimum count, since that is a blanket requirement regardless of time, - // if we haven't hit the minimum then we need to keep the snapshot regardless of - // expiration time - if (this.minimumSnapshotCount != null && successfulSnapshotCount <= this.minimumSnapshotCount) { - if (UNSUCCESSFUL_STATES.contains(si.state()) == false) { - logger.trace( - "[{}]: INELIGIBLE as there are {} non-failed snapshots ({} total) and {} minimum snapshots needed", - snapName, - successfulSnapshotCount, - totalSnapshotCount, - this.minimumSnapshotCount - ); - return false; - } else { - logger.trace( - "[{}]: SKIPPING minimum snapshot count check as this snapshot is {} and not counted " - + "towards the minimum snapshot count.", - 
snapName, - si.state() - ); - } + // Next check the minimum count, since that is a blanket requirement regardless of time, + // if we haven't hit the minimum then we need to keep the snapshot regardless of + // expiration time + if (this.minimumSnapshotCount != null && successfulSnapshotCount <= this.minimumSnapshotCount) { + if (UNSUCCESSFUL_STATES.contains(snapshotState) == false) { + logger.trace( + "[{}]: INELIGIBLE as there are {} non-failed snapshots ({} total) and {} minimum snapshots needed", + snapshotName, + successfulSnapshotCount, + totalSnapshotCount, + this.minimumSnapshotCount + ); + return false; + } else { + logger.trace( + "[{}]: SKIPPING minimum snapshot count check as this snapshot is {} and not counted " + + "towards the minimum snapshot count.", + snapshotName, + snapshotState + ); } + } - // Finally, check the expiration time of the snapshot, if it is past, then it is - // eligible for deletion - if (this.expireAfter != null) { - if (this.minimumSnapshotCount != null) { - // Only the oldest N snapshots are actually eligible, since if we went below this we - // would fall below the configured minimum number of snapshots to keep - final boolean maybeEligible; - if (si.state() == SnapshotState.SUCCESS) { - maybeEligible = sortedSnapshots.stream() - .filter(snap -> SnapshotState.SUCCESS.equals(snap.state())) - .limit(Math.max(0, successfulSnapshotCount - minimumSnapshotCount)) - .anyMatch(si::equals); - } else if (UNSUCCESSFUL_STATES.contains(si.state())) { - maybeEligible = sortedSnapshots.contains(si); - } else { - logger.trace("[{}] INELIGIBLE because snapshot is in state [{}]", snapName, si.state()); - return false; - } - if (maybeEligible == false) { - // This snapshot is *not* one of the N oldest snapshots, so even if it were - // old enough, the other snapshots would be deleted before it - logger.trace( - "[{}]: INELIGIBLE as snapshot expiration would pass the " - + "minimum number of configured snapshots ({}) to keep, regardless of age", 
- snapName, - this.minimumSnapshotCount - ); - return false; - } - } - final long snapshotAge = nowSupplier.getAsLong() - si.startTime(); - if (snapshotAge > this.expireAfter.getMillis()) { - logger.trace( - () -> format( - "[%s]: ELIGIBLE as snapshot age of %s is older than %s", - snapName, - new TimeValue(snapshotAge).toHumanReadableString(3), - this.expireAfter.toHumanReadableString(3) - ) - ); - return true; + // Finally, check the expiration time of the snapshot, if it is past, then it is + // eligible for deletion + if (this.expireAfter != null) { + if (this.minimumSnapshotCount != null) { + // Only the oldest N snapshots are actually eligible, since if we went below this we + // would fall below the configured minimum number of snapshots to keep + final boolean maybeEligible; + if (snapshotState == SnapshotState.SUCCESS) { + maybeEligible = sortedSnapshots.stream() + .filter(snap -> SnapshotState.SUCCESS.equals(snap.getValue().getSnapshotState())) + .limit(Math.max(0, successfulSnapshotCount - minimumSnapshotCount)) + .anyMatch(s -> s.getKey().equals(snapshotId)); + } else if (UNSUCCESSFUL_STATES.contains(snapshotState)) { + maybeEligible = allSnapshots.containsKey(snapshotId); } else { + logger.trace("[{}] INELIGIBLE because snapshot is in state [{}]", snapshotName, snapshotState); + return false; + } + if (maybeEligible == false) { + // This snapshot is *not* one of the N oldest snapshots, so even if it were + // old enough, the other snapshots would be deleted before it logger.trace( - () -> format( - "[%s]: INELIGIBLE as snapshot age of [%sms] is newer than %s", - snapName, - new TimeValue(snapshotAge).toHumanReadableString(3), - this.expireAfter.toHumanReadableString(3) - ) + "[{}]: INELIGIBLE as snapshot expiration would pass the " + + "minimum number of configured snapshots ({}) to keep, regardless of age", + snapshotName, + this.minimumSnapshotCount ); return false; } } - // If nothing matched, the snapshot is not eligible for deletion - 
logger.trace("[{}]: INELIGIBLE as no retention predicates matched", snapName); - return false; - }; + final long snapshotAge = nowSupplier.getAsLong() - startTimeMillis; + if (snapshotAge > this.expireAfter.getMillis()) { + logger.trace( + () -> format( + "[%s]: ELIGIBLE as snapshot age of %s is older than %s", + snapshotName, + new TimeValue(snapshotAge).toHumanReadableString(3), + this.expireAfter.toHumanReadableString(3) + ) + ); + return true; + } else { + logger.trace( + () -> format( + "[%s]: INELIGIBLE as snapshot age of [%sms] is newer than %s", + snapshotName, + new TimeValue(snapshotAge).toHumanReadableString(3), + this.expireAfter.toHumanReadableString(3) + ) + ); + return false; + } + } + // If nothing matched, the snapshot is not eligible for deletion + logger.trace("[{}]: INELIGIBLE as no retention predicates matched", snapshotName); + return false; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/action/FindStructureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/action/FindStructureAction.java index 8387511fa50f5..e760471a6f1c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/action/FindStructureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/action/FindStructureAction.java @@ -15,11 +15,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.core.TimeValue; import org.elasticsearch.grok.GrokBuiltinPatterns; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import 
org.elasticsearch.xpack.core.textstructure.structurefinder.TextStructure; @@ -42,7 +41,7 @@ private FindStructureAction() { super(NAME, Response::new); } - public static class Response extends ActionResponse implements StatusToXContentObject, Writeable { + public static class Response extends ActionResponse implements ToXContentObject, Writeable { private final TextStructure textStructure; @@ -60,11 +59,6 @@ public void writeTo(StreamOutput out) throws IOException { textStructure.writeTo(out); } - @Override - public RestStatus status() { - return RestStatus.OK; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { textStructure.toXContent(builder, params); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncResultsServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncResultsServiceTests.java index 1845e36ad34fd..a2f4239b4a3f9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncResultsServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncResultsServiceTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateResponse; @@ -185,7 +185,7 @@ public void testRetrieveFromMemoryWithExpiration() throws Exception { if (updateInitialResultsInStore) { // we need to store initial result - PlainActionFuture future = new PlainActionFuture<>(); + PlainActionFuture future = new PlainActionFuture<>(); indexService.createResponse( task.getExecutionId().getDocId(), task.getOriginHeaders(), @@ -233,7 +233,7 @@ public void 
testAssertExpirationPropagation() throws Exception { if (updateInitialResultsInStore) { // we need to store initial result - PlainActionFuture future = new PlainActionFuture<>(); + PlainActionFuture future = new PlainActionFuture<>(); indexService.createResponse( task.getExecutionId().getDocId(), task.getOriginHeaders(), @@ -275,7 +275,7 @@ public void testRetrieveFromDisk() throws Exception { if (updateInitialResultsInStore) { // we need to store initial result - PlainActionFuture futureCreate = new PlainActionFuture<>(); + PlainActionFuture futureCreate = new PlainActionFuture<>(); indexService.createResponse( task.getExecutionId().getDocId(), task.getOriginHeaders(), @@ -293,7 +293,7 @@ public void testRetrieveFromDisk() throws Exception { ); futureUpdate.actionGet(TimeValue.timeValueSeconds(10)); } else { - PlainActionFuture futureCreate = new PlainActionFuture<>(); + PlainActionFuture futureCreate = new PlainActionFuture<>(); indexService.createResponse( task.getExecutionId().getDocId(), task.getOriginHeaders(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncSearchIndexServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncSearchIndexServiceTests.java index 379530d72786d..61d48817f03a4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncSearchIndexServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncSearchIndexServiceTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.service.ClusterService; @@ -142,7 +141,7 @@ public void testEncodeSearchResponse() throws IOException { new 
TaskId(randomAlphaOfLength(10), randomNonNegativeLong()) ); - PlainActionFuture createFuture = new PlainActionFuture<>(); + PlainActionFuture createFuture = new PlainActionFuture<>(); indexService.createResponse(executionId.getDocId(), Map.of(), initialResponse, createFuture); assertThat(createFuture.actionGet().getResult(), equalTo(DocWriteResponse.Result.CREATED)); @@ -264,7 +263,7 @@ public CircuitBreakerStats stats(String name) { { circuitBreaker.adjustLimit(randomIntBetween(1, 64)); // small limit TestAsyncResponse initialResponse = new TestAsyncResponse(testMessage, expirationTime); - PlainActionFuture createFuture = new PlainActionFuture<>(); + PlainActionFuture createFuture = new PlainActionFuture<>(); indexService.createResponse(executionId.getDocId(), Map.of(), initialResponse, createFuture); CircuitBreakingException e = expectThrows(CircuitBreakingException.class, createFuture::actionGet); assertEquals(0, e.getSuppressed().length); // no other suppressed exceptions @@ -273,7 +272,7 @@ public CircuitBreakerStats stats(String name) { { circuitBreaker.adjustLimit(randomIntBetween(16 * 1024, 1024 * 1024)); // large enough TestAsyncResponse initialResponse = new TestAsyncResponse(testMessage, expirationTime); - PlainActionFuture createFuture = new PlainActionFuture<>(); + PlainActionFuture createFuture = new PlainActionFuture<>(); indexService.createResponse(executionId.getDocId(), Map.of(), initialResponse, createFuture); assertThat(createFuture.actionGet().getResult(), equalTo(DocWriteResponse.Result.CREATED)); assertThat(circuitBreaker.getUsed(), equalTo(0L)); @@ -337,7 +336,7 @@ public void testMaxAsyncSearchResponseSize() throws Exception { new TaskId(randomAlphaOfLength(10), randomNonNegativeLong()) ); TestAsyncResponse initialResponse = new TestAsyncResponse(randomAlphaOfLength(130), randomLong()); - PlainActionFuture createFuture1 = new PlainActionFuture<>(); + PlainActionFuture createFuture1 = new PlainActionFuture<>(); 
indexService.createResponse(executionId1.getDocId(), Map.of(), initialResponse, createFuture1); createFuture1.actionGet(); @@ -369,7 +368,7 @@ public void testMaxAsyncSearchResponseSize() throws Exception { Long.toString(randomNonNegativeLong()), new TaskId(randomAlphaOfLength(10), randomNonNegativeLong()) ); - PlainActionFuture createFuture = new PlainActionFuture<>(); + PlainActionFuture createFuture = new PlainActionFuture<>(); TestAsyncResponse initialResponse2 = new TestAsyncResponse(randomAlphaOfLength(130), randomLong()); indexService.createResponse(executionId2.getDocId(), Map.of(), initialResponse2, createFuture); IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, createFuture::actionGet); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java index 85689249486d1..d43295b2fe543 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java @@ -6,10 +6,10 @@ */ package org.elasticsearch.xpack.core.async; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateResponse; @@ -217,7 +217,7 @@ public void testAutoCreateIndex() throws Exception { AsyncExecutionId id = new AsyncExecutionId("0", new TaskId("N/A", 0)); AsyncSearchResponse resp = new AsyncSearchResponse(id.getEncoded(), true, true, 0L, 0L); { - PlainActionFuture future = 
PlainActionFuture.newFuture(); + PlainActionFuture future = PlainActionFuture.newFuture(); indexService.createResponse(id.getDocId(), Collections.emptyMap(), resp, future); future.get(); assertSettings(); @@ -252,7 +252,7 @@ public void testAutoCreateIndex() throws Exception { // But the index is still auto-created { - PlainActionFuture future = PlainActionFuture.newFuture(); + PlainActionFuture future = PlainActionFuture.newFuture(); indexService.createResponse(id.getDocId(), Collections.emptyMap(), resp, future); future.get(); assertSettings(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java index 51f65fe0c9a0b..8b382beeb0644 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -104,7 +105,8 @@ public static TrainedModelConfig.Builder createTestInstance(String modelId, bool .setLicenseLevel(randomFrom(License.OperationMode.PLATINUM.description(), License.OperationMode.BASIC.description())) .setInferenceConfig(randomFrom(inferenceConfigs)) .setTags(tags) - .setLocation(randomBoolean() ? null : IndexLocationTests.randomInstance()); + .setLocation(randomBoolean() ? null : IndexLocationTests.randomInstance()) + .setPlatformArchitecture(randomBoolean() ? 
null : randomAlphaOfLength(10)); } @Before @@ -191,7 +193,8 @@ public void testToXContentWithParams() throws IOException { .collect(Collectors.toMap(Function.identity(), (k) -> randomAlphaOfLength(10))), randomFrom(ClassificationConfigTests.randomClassificationConfig(), RegressionConfigTests.randomRegressionConfig()), null, - ModelPackageConfigTests.randomModulePackageConfig() + ModelPackageConfigTests.randomModulePackageConfig(), + randomAlphaOfLength(10) ); BytesReference reference = XContentHelper.toXContent(config, XContentType.JSON, ToXContent.EMPTY_PARAMS, false); @@ -241,7 +244,8 @@ public void testParseWithBothDefinitionAndCompressedSupplied() throws IOExceptio .collect(Collectors.toMap(Function.identity(), (k) -> randomAlphaOfLength(10))), randomFrom(ClassificationConfigTests.randomClassificationConfig(), RegressionConfigTests.randomRegressionConfig()), null, - ModelPackageConfigTests.randomModulePackageConfig() + ModelPackageConfigTests.randomModulePackageConfig(), + randomAlphaOfLength(10) ); BytesReference reference = XContentHelper.toXContent(config, XContentType.JSON, ToXContent.EMPTY_PARAMS, false); @@ -453,6 +457,9 @@ protected TrainedModelConfig mutateInstanceForVersion(TrainedModelConfig instanc if (instance.getInferenceConfig() instanceof NlpConfig nlpConfig) { builder.setInferenceConfig(InferenceConfigItemTestCase.mutateForVersion(nlpConfig, version)); } + if (version.before(TransportVersions.ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED)) { + builder.setPlatformArchitecture(null); + } return builder.build(); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java index 02034eb6bd2da..ec3fc2b8a88ef 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; @@ -23,6 +24,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -45,6 +47,23 @@ public void testConflictingSettings() { assertThat(e.getMessage(), containsString("minimum snapshot count 3 cannot be larger than maximum snapshot count 1")); } + private static Map detailsMap(SnapshotInfo... snapshotInfos) { + return Arrays.stream(snapshotInfos) + .collect(Collectors.toMap(SnapshotInfo::snapshotId, RepositoryData.SnapshotDetails::fromSnapshotInfo)); + } + + private static boolean isSnapshotEligibleForDeletion( + SnapshotRetentionConfiguration snapshotRetentionConfiguration, + SnapshotInfo si, + Map allSnapshots + ) { + return snapshotRetentionConfiguration.isSnapshotEligibleForDeletion( + si.snapshotId(), + RepositoryData.SnapshotDetails.fromSnapshotInfo(si), + allSnapshots + ); + } + public void testExpireAfter() { SnapshotRetentionConfiguration conf = new SnapshotRetentionConfiguration( () -> TimeValue.timeValueDays(1).millis() + 1, @@ -53,16 +72,14 @@ public void testExpireAfter() { null ); SnapshotInfo oldInfo = makeInfo(0); - assertThat(conf.getSnapshotDeletionPredicate(Collections.singletonList(oldInfo)).test(oldInfo), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, oldInfo, detailsMap(oldInfo)), equalTo(true)); SnapshotInfo newInfo = makeInfo(1); - assertThat(conf.getSnapshotDeletionPredicate(Collections.singletonList(newInfo)).test(newInfo), equalTo(false)); + 
assertThat(isSnapshotEligibleForDeletion(conf, newInfo, detailsMap(newInfo)), equalTo(false)); - List infos = new ArrayList<>(); - infos.add(newInfo); - infos.add(oldInfo); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(newInfo), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(oldInfo), equalTo(true)); + final var infos = detailsMap(newInfo, oldInfo); + assertThat(isSnapshotEligibleForDeletion(conf, newInfo, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, oldInfo, infos), equalTo(true)); } public void testExpiredWithMinimum() { @@ -75,15 +92,13 @@ public void testExpiredWithMinimum() { SnapshotInfo oldInfo = makeInfo(0); SnapshotInfo newInfo = makeInfo(1); - List infos = new ArrayList<>(); - infos.add(newInfo); - infos.add(oldInfo); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(newInfo), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(oldInfo), equalTo(false)); + final var infos = detailsMap(newInfo, oldInfo); + assertThat(isSnapshotEligibleForDeletion(conf, newInfo, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, oldInfo, infos), equalTo(false)); conf = new SnapshotRetentionConfiguration(() -> TimeValue.timeValueDays(1).millis() + 1, TimeValue.timeValueDays(1), 1, null); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(newInfo), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(oldInfo), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, newInfo, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, oldInfo, infos), equalTo(true)); } public void testMaximum() { @@ -98,16 +113,16 @@ public void testMaximum() { SnapshotInfo s8 = makeInfo(8); SnapshotInfo s9 = makeInfo(9); - List infos = Arrays.asList(s1, s2, s3, s4, s5, s6, s7, s8, s9); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s1), equalTo(true)); - 
assertThat(conf.getSnapshotDeletionPredicate(infos).test(s2), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s3), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s4), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s5), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s6), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s7), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s8), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s9), equalTo(false)); + final var infos = detailsMap(s1, s2, s3, s4, s5, s6, s7, s8, s9); + assertThat(isSnapshotEligibleForDeletion(conf, s1, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, s2, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, s3, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, s4, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, s5, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s6, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s7, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s8, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s9, infos), equalTo(false)); } public void testMaximumWithExpireAfter() { @@ -121,10 +136,10 @@ public void testMaximumWithExpireAfter() { SnapshotInfo old2 = makeInfo(1); SnapshotInfo new1 = makeInfo(2); - List infos = Arrays.asList(old1, old2, new1); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(old1), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(old2), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(new1), equalTo(false)); + final var infos = detailsMap(old1, old2, new1); + assertThat(isSnapshotEligibleForDeletion(conf, old1, infos), 
equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, old2, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, new1, infos), equalTo(false)); } public void testMaximumWithFailedOrPartial() { @@ -134,11 +149,11 @@ public void testMaximumWithFailedOrPartial() { SnapshotInfo s3 = makeInfo(3); SnapshotInfo s4 = makeInfo(4); - List infos = Arrays.asList(s1, s2, s3, s4); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s1), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s2), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s3), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s4), equalTo(false)); + final var infos = detailsMap(s1, s2, s3, s4); + assertThat(isSnapshotEligibleForDeletion(conf, s1, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, s2, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, s3, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, s4, infos), equalTo(false)); } public void testFailuresDeletedIfExpired() { @@ -157,16 +172,14 @@ private void assertUnsuccessfulDeletedIfExpired(boolean failure) { null ); SnapshotInfo oldInfo = makeFailureOrPartial(0, failure); - assertThat(conf.getSnapshotDeletionPredicate(Collections.singletonList(oldInfo)).test(oldInfo), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, oldInfo, detailsMap(oldInfo)), equalTo(true)); SnapshotInfo newInfo = makeFailureOrPartial(1, failure); - assertThat(conf.getSnapshotDeletionPredicate(Collections.singletonList(newInfo)).test(newInfo), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, newInfo, detailsMap(newInfo)), equalTo(false)); - List infos = new ArrayList<>(); - infos.add(newInfo); - infos.add(oldInfo); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(newInfo), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(oldInfo), 
equalTo(true)); + final var infos = detailsMap(oldInfo, newInfo); + assertThat(isSnapshotEligibleForDeletion(conf, newInfo, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, oldInfo, infos), equalTo(true)); } public void testFailuresDeletedIfNoExpiryAndMoreRecentSuccessExists() { @@ -184,11 +197,11 @@ private void assertUnsuccessfulDeletedIfNoExpiryAndMoreRecentSuccessExists(boole SnapshotInfo s3 = makeFailureOrPartial(3, failure); SnapshotInfo s4 = makeInfo(4); - List infos = Arrays.asList(s1, s2, s3, s4); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s1), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s2), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s3), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s4), equalTo(false)); + final var infos = detailsMap(s1, s2, s3, s4); + assertThat(isSnapshotEligibleForDeletion(conf, s1, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s2, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s3, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, s4, infos), equalTo(false)); } public void testFailuresKeptIfNoExpiryAndNoMoreRecentSuccess() { @@ -207,11 +220,11 @@ private void assertUnsuccessfulKeptIfNoExpiryAndNoMoreRecentSuccess(boolean fail SnapshotInfo s3 = makeInfo(3); SnapshotInfo s4 = makeFailureOrPartial(4, failure); - List infos = Arrays.asList(s1, s2, s3, s4); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s1), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s2), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s3), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s4), equalTo(false)); + final var infos = detailsMap(s1, s2, s3, s4); + assertThat(isSnapshotEligibleForDeletion(conf, s1, infos), equalTo(false)); + 
assertThat(isSnapshotEligibleForDeletion(conf, s2, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s3, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s4, infos), equalTo(false)); } public void testFailuresNotCountedTowardsMaximum() { @@ -230,12 +243,12 @@ private void assertUnsuccessfulNotCountedTowardsMaximum(boolean failure) { SnapshotInfo s4 = makeFailureOrPartial(4, failure); SnapshotInfo s5 = makeInfo(5); - List infos = Arrays.asList(s1, s2, s3, s4, s5); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s1), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s2), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s3), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s4), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s5), equalTo(false)); + final var infos = detailsMap(s1, s2, s3, s4, s5); + assertThat(isSnapshotEligibleForDeletion(conf, s1, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s2, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s3, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s4, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s5, infos), equalTo(false)); } public void testFailuresNotCountedTowardsMinimum() { @@ -257,18 +270,15 @@ private void assertUnsuccessfulNotCountedTowardsMinimum(boolean failure) { SnapshotInfo failureInfo = makeFailureOrPartial(1, failure); SnapshotInfo newInfo = makeInfo(2); - List infos = new ArrayList<>(); - infos.add(newInfo); - infos.add(failureInfo); - infos.add(oldInfo); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(newInfo), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(failureInfo), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(oldInfo), equalTo(false)); + final var infos = 
detailsMap(newInfo, failureInfo, oldInfo); + assertThat(isSnapshotEligibleForDeletion(conf, newInfo, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, failureInfo, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, oldInfo, infos), equalTo(false)); conf = new SnapshotRetentionConfiguration(() -> TimeValue.timeValueDays(1).millis() + 2, TimeValue.timeValueDays(1), 1, null); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(newInfo), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(failureInfo), equalTo(true)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(oldInfo), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, newInfo, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, failureInfo, infos), equalTo(true)); + assertThat(isSnapshotEligibleForDeletion(conf, oldInfo, infos), equalTo(true)); } public void testMostRecentSuccessfulTimestampIsUsed() { @@ -279,11 +289,11 @@ public void testMostRecentSuccessfulTimestampIsUsed() { SnapshotInfo s3 = makeFailureOrPartial(3, failureBeforePartial); SnapshotInfo s4 = makeFailureOrPartial(4, failureBeforePartial == false); - List infos = Arrays.asList(s1, s2, s3, s4); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s1), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s2), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s3), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s4), equalTo(false)); + final var infos = detailsMap(s1, s2, s3, s4); + assertThat(isSnapshotEligibleForDeletion(conf, s1, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s2, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s3, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s4, infos), equalTo(false)); } public void testFewerSuccessesThanMinWithPartial() { @@ 
-292,10 +302,10 @@ public void testFewerSuccessesThanMinWithPartial() { SnapshotInfo sP = makePartialInfo(2); SnapshotInfo s2 = makeInfo(3); - List infos = Arrays.asList(s1, sP, s2); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s1), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(sP), equalTo(false)); - assertThat(conf.getSnapshotDeletionPredicate(infos).test(s2), equalTo(false)); + final var infos = detailsMap(s1, sP, s2); + assertThat(isSnapshotEligibleForDeletion(conf, s1, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, sP, infos), equalTo(false)); + assertThat(isSnapshotEligibleForDeletion(conf, s2, infos), equalTo(false)); } private SnapshotInfo makeInfo(long startTime) { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json index 7ff961a0aac9c..77634546e0e6e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json @@ -12,6 +12,9 @@ "model_id": { "type": "keyword" }, + "platform_architecture" : { + "type" : "keyword" + }, "created_by": { "type": "keyword" }, diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java index aa1929614292e..e07568a32b826 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java @@ -45,6 +45,7 @@ public class Downsample extends Plugin implements ActionPlugin, PersistentTaskPl public static final String DOWSAMPLE_TASK_THREAD_POOL_NAME = "downsample_indexing"; private static final int 
DOWNSAMPLE_TASK_THREAD_POOL_QUEUE_SIZE = 256; + public static final String DOWNSAMPLE_MIN_NUMBER_OF_REPLICAS_NAME = "downsample.min_number_of_replicas"; @Override public List> getExecutorBuilders(Settings settings) { diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java index c2b8f909618b7..013ad20ffe04d 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/LabelFieldProducer.java @@ -158,7 +158,7 @@ public void write(XContentBuilder builder) throws IOException { if (isEmpty() == false) { final HistogramValue histogramValue = (HistogramValue) label.get(); final List values = new ArrayList<>(); - final List counts = new ArrayList<>(); + final List counts = new ArrayList<>(); while (histogramValue.next()) { values.add(histogramValue.value()); counts.add(histogramValue.count()); diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 87bdd0f0bb8ba..322267a14d32f 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -319,35 +319,42 @@ protected void masterOperation( return; } // 3. 
Create downsample index - createDownsampleIndex(downsampleIndexName, sourceIndexMetadata, mapping, request, ActionListener.wrap(createIndexResp -> { - if (createIndexResp.isAcknowledged()) { - performShardDownsampling( - request, - listener, - sourceIndexMetadata, - downsampleIndexName, - parentTask, - metricFields, - labelFields - ); - } else { - listener.onFailure(new ElasticsearchException("Failed to create downsample index [" + downsampleIndexName + "]")); - } - }, e -> { - if (e instanceof ResourceAlreadyExistsException) { - performShardDownsampling( - request, - listener, - sourceIndexMetadata, - downsampleIndexName, - parentTask, - metricFields, - labelFields - ); - } else { - listener.onFailure(e); - } - })); + createDownsampleIndex( + clusterService.getSettings(), + downsampleIndexName, + sourceIndexMetadata, + mapping, + request, + ActionListener.wrap(createIndexResp -> { + if (createIndexResp.isAcknowledged()) { + performShardDownsampling( + request, + listener, + sourceIndexMetadata, + downsampleIndexName, + parentTask, + metricFields, + labelFields + ); + } else { + listener.onFailure(new ElasticsearchException("Failed to create downsample index [" + downsampleIndexName + "]")); + } + }, e -> { + if (e instanceof ResourceAlreadyExistsException) { + performShardDownsampling( + request, + listener, + sourceIndexMetadata, + downsampleIndexName, + parentTask, + metricFields, + labelFields + ); + } else { + listener.onFailure(e); + } + }) + ); }, listener::onFailure)); } @@ -714,6 +721,7 @@ private static void addDynamicTemplates(final XContentBuilder builder) throws IO } private void createDownsampleIndex( + Settings settings, String downsampleIndexName, IndexMetadata sourceIndexMetadata, String mapping, @@ -729,10 +737,11 @@ private void createDownsampleIndex( * We should note that there is a risk of losing a node during the downsample process. In this * case downsample will fail. 
*/ + int numberOfReplicas = settings.getAsInt(Downsample.DOWNSAMPLE_MIN_NUMBER_OF_REPLICAS_NAME, 0); Settings.Builder builder = Settings.builder() .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, sourceIndexMetadata.getNumberOfShards()) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, String.valueOf(numberOfReplicas)) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "-1") .put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey(), DownsampleTaskStatus.STARTED); if (sourceIndexMetadata.getSettings().hasValue(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey())) { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java index ecbddb2d52742..abc811afa8be4 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; @@ -88,6 +89,9 @@ public EnrichStatsAction.Response.CacheStats getStats(String localNodeId) { private String getEnrichIndexKey(SearchRequest searchRequest) { String alias = searchRequest.indices()[0]; IndexAbstraction ia = metadata.getIndicesLookup().get(alias); + if (ia == null) { + throw new IndexNotFoundException("no generated enrich index [" + alias + "]"); + } return ia.getIndices().get(0).getName(); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java 
b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java index b09baad222baf..735d68f61416e 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -21,6 +22,7 @@ import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -183,4 +185,22 @@ public void testDeepCopy() { assertArrayEquals(new byte[] { 1, 2, 3 }, (byte[]) result.get("embedded_object")); } + public void testEnrichIndexNotExist() { + // Emulate cluster metadata: + var metadata = Metadata.builder().build(); + + // Emulated search request on a non-existent enrich index that an enrich processor could generate + var searchRequest = new SearchRequest(EnrichPolicy.getBaseName("policy-enrich-index-not-generated")).source( + new SearchSourceBuilder().query(new MatchQueryBuilder("test", "query")) + ); + // Emulated search response (content doesn't matter, since it isn't used, it's just a cache entry) + List> searchResponse = List.of(Map.of("test", "entry")); + + EnrichCache enrichCache = new EnrichCache(1); + enrichCache.setMetadata(metadata); + + IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> enrichCache.put(searchRequest, searchResponse)); + assertThat(e.getMessage(), containsString("no generated enrich index 
[.enrich-policy-enrich-index-not-generated]")); + } + } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index d8e582c9fb880..9e62ed225ccf0 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; @@ -29,7 +30,6 @@ import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; @@ -112,7 +112,7 @@ public static void afterClass() { public void testRunner() throws Exception { final String sourceIndex = "source-index"; - IndexResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" { "field1": "value1", "field2": 2, @@ -196,7 +196,7 @@ public void testRunner() throws Exception { public void testRunnerGeoMatchType() throws Exception { final String sourceIndex = "source-index"; - IndexResponse indexRequest = client().index(new 
IndexRequest().index(sourceIndex).id("id").source(""" + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" {"location":"POINT(10.0 10.0)","zipcode":90210}""", XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)) .actionGet(); assertEquals(RestStatus.CREATED, indexRequest.status()); @@ -283,7 +283,7 @@ public void testRunnerDoubleRangeMatchType() throws Exception { private void testNumberRangeMatchType(String rangeType) throws Exception { final String sourceIndex = "source-index"; createIndex(sourceIndex, Settings.EMPTY, "_doc", "range", "type=" + rangeType + "_range"); - IndexResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" {"range":{"gt":1,"lt":10},"zipcode":90210}""", XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)) .actionGet(); assertEquals(RestStatus.CREATED, indexRequest.status()); @@ -366,7 +366,7 @@ private GetIndexResponse getGetIndexResponseAndCheck(String createdEnrichIndex) public void testRunnerRangeTypeWithIpRange() throws Exception { final String sourceIndexName = "source-index"; createIndex(sourceIndexName, Settings.EMPTY, "_doc", "subnet", "type=ip_range"); - IndexResponse indexRequest = client().index(new IndexRequest().index(sourceIndexName).id("id").source(""" + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndexName).id("id").source(""" {"subnet":"10.0.0.0/8","department":"research"}""", XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)) .actionGet(); assertEquals(RestStatus.CREATED, indexRequest.status()); @@ -450,7 +450,7 @@ public void testRunnerMultiSource() throws Exception { int numberOfSourceIndices = 3; for (int idx = 0; idx < numberOfSourceIndices; idx++) { final String sourceIndex = baseSourceName + idx; - IndexResponse 
indexRequest = client().index( + DocWriteResponse indexRequest = client().index( new IndexRequest().index(sourceIndex).id(randomAlphaOfLength(10)).source(Strings.format(""" { "idx": %s, @@ -554,7 +554,7 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { String collidingDocId = randomAlphaOfLength(10); for (int idx = 0; idx < numberOfSourceIndices; idx++) { final String sourceIndex = baseSourceName + idx; - IndexResponse indexRequest = client().index( + DocWriteResponse indexRequest = client().index( new IndexRequest().index(sourceIndex).id(collidingDocId).routing(collidingDocId + idx).source(Strings.format(""" { "idx": %s, @@ -673,7 +673,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { int numberOfSourceIndices = 3; for (int idx = 0; idx < numberOfSourceIndices; idx++) { final String sourceIndex = baseSourceName + idx; - IndexResponse indexRequest = client().index( + DocWriteResponse indexRequest = client().index( new IndexRequest().index(sourceIndex).id(randomAlphaOfLength(10)).source(Strings.format(""" { "idx": %s, @@ -983,7 +983,7 @@ public void testRunnerObjectSourceMapping() throws Exception { CreateIndexResponse createResponse = indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mappingBuilder)).actionGet(); assertTrue(createResponse.isAcknowledged()); - IndexResponse indexRequest = client().index( + DocWriteResponse indexRequest = client().index( new IndexRequest().index(sourceIndex) .id("id") .source(""" @@ -1092,7 +1092,7 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { CreateIndexResponse createResponse = indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mappingBuilder)).actionGet(); assertTrue(createResponse.isAcknowledged()); - IndexResponse indexRequest = client().index( + DocWriteResponse indexRequest = client().index( new IndexRequest().index(sourceIndex) .id("id") .source(""" @@ -1201,7 +1201,7 @@ public void 
testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception CreateIndexResponse createResponse = indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mappingBuilder)).actionGet(); assertTrue(createResponse.isAcknowledged()); - IndexResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" { "data": { "subnet": "10.0.0.0/8", @@ -1316,7 +1316,7 @@ public void testRunnerTwoObjectLevelsSourceMapping() throws Exception { CreateIndexResponse createResponse = indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mappingBuilder)).actionGet(); assertTrue(createResponse.isAcknowledged()); - IndexResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" { "data": { "fields": { @@ -1440,7 +1440,7 @@ public void testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception CreateIndexResponse createResponse = indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mappingBuilder)).actionGet(); assertTrue(createResponse.isAcknowledged()); - IndexResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" { "data": { "fields": { @@ -1564,7 +1564,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E CreateIndexResponse createResponse = indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mappingBuilder)).actionGet(); assertTrue(createResponse.isAcknowledged()); - IndexResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + DocWriteResponse indexRequest = client().index(new 
IndexRequest().index(sourceIndex).id("id").source(""" { "data": { "fields": { @@ -1707,7 +1707,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { CreateIndexResponse createResponse = indicesAdmin().create(new CreateIndexRequest(sourceIndex).mapping(mappingBuilder)).actionGet(); assertTrue(createResponse.isAcknowledged()); - IndexResponse indexRequest = client().index( + DocWriteResponse indexRequest = client().index( new IndexRequest().index(sourceIndex) .id("id") .source(""" @@ -1787,7 +1787,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { public void testRunnerWithForceMergeRetry() throws Exception { final String sourceIndex = "source-index"; - IndexResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" { "field1": "value1", "field2": 2, @@ -1885,7 +1885,7 @@ protected void ensureSingleSegment(String destinationIndexName, int attempt) { if (attempt == 1) { // Put and flush a document to increase the number of segments, simulating not // all segments were merged on the first try. 
- IndexResponse indexRequest = client().index( + DocWriteResponse indexRequest = client().index( new IndexRequest().index(createdEnrichIndex) .source(unmergedDocument) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -1957,7 +1957,7 @@ protected void ensureSingleSegment(String destinationIndexName, int attempt) { public void testRunnerCancel() throws Exception { final String sourceIndex = "source-index"; - IndexResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" + DocWriteResponse indexRequest = client().index(new IndexRequest().index(sourceIndex).id("id").source(""" { "field1": "value1", "field2": 2, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java index b5e810e30d125..67599e565b816 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java @@ -16,10 +16,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -315,7 +313,7 @@ public RequestBuilder clientAddress(String clientAddress) { } } - public static class Response extends ActionResponse implements StatusToXContentObject { + public static class Response extends ActionResponse 
implements ToXContentObject { public static Response ACCEPTED = new Response(true); public static Response readFromStreamInput(StreamInput in) throws IOException { @@ -379,11 +377,6 @@ protected void addFieldsToXContent(XContentBuilder builder, Params params) throw } - @Override - public RestStatus status() { - return RestStatus.ACCEPTED; - } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "post_analytics_event_response", false, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java index 06296eddcd8f3..6b051a4104873 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java @@ -13,8 +13,6 @@ import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -106,7 +104,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - public static class Response extends AcknowledgedResponse implements StatusToXContentObject { + public static class Response extends AcknowledgedResponse implements ToXContentObject { public static final ParseField COLLECTION_NAME_FIELD = new ParseField("name"); @@ -122,11 +120,6 @@ public Response(boolean acknowledged, String name) { this.name = name; } - @Override - public 
RestStatus status() { - return RestStatus.CREATED; - } - public String getName() { return name; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java index 4f2b25bee4d0a..0e3bb7150c45e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java @@ -13,9 +13,10 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.EnterpriseSearch; import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler; @@ -48,7 +49,11 @@ public List routes() { @Override protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) { PostAnalyticsEventAction.Request request = buidRequest(restRequest); - return channel -> client.execute(PostAnalyticsEventAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute( + PostAnalyticsEventAction.INSTANCE, + request, + new RestToXContentListener<>(channel, r -> RestStatus.ACCEPTED) + ); } private InetAddress getClientAddress(RestRequest restRequest, Map> headers) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java index fee4cef9a4946..7fbdcc116e617 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java @@ -11,9 +11,10 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler; import org.elasticsearch.xpack.application.utils.LicenseUtils; @@ -45,7 +46,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeC return channel -> client.execute( PutAnalyticsCollectionAction.INSTANCE, request, - new RestStatusToXContentListener<>(channel, _r -> location) + new RestToXContentListener<>(channel, r -> RestStatus.CREATED, _r -> location) ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index de823ab2a6ce2..01807fd986947 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.get.GetRequest; import 
org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; @@ -232,7 +231,7 @@ private List parseCriteria(List> rawCrite * @param queryRuleset The query ruleset object. * @param listener The action listener to invoke on response/failure. */ - public void putQueryRuleset(QueryRuleset queryRuleset, ActionListener listener) { + public void putQueryRuleset(QueryRuleset queryRuleset, ActionListener listener) { try { validateQueryRuleset(queryRuleset); final IndexRequest indexRequest = new IndexRequest(QUERY_RULES_ALIAS_NAME).opType(DocWriteRequest.OpType.INDEX) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java index 2017fc2be4e3f..f352fe4d73479 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsAction.java @@ -13,8 +13,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -104,7 +102,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - public static class Response extends ActionResponse implements StatusToXContentObject { + public 
static class Response extends ActionResponse implements ToXContentObject { public static final ParseField RESULT_FIELD = new ParseField("results"); @@ -129,11 +127,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return queryPage.toXContent(builder, params); } - @Override - public RestStatus status() { - return RestStatus.OK; - } - public QueryPage queryPage() { return queryPage; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java index af87e6efbdb0a..d29df284f8660 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/PutQueryRulesetAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -125,7 +124,7 @@ public String toString() { } - public static class Response extends ActionResponse implements StatusToXContentObject { + public static class Response extends ActionResponse implements ToXContentObject { final DocWriteResponse.Result result; @@ -151,7 +150,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - @Override public RestStatus status() { return switch (result) { case CREATED -> RestStatus.CREATED; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRulesetAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRulesetAction.java index 2866dd5bb635b..a43ac70327e77 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRulesetAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; @@ -46,11 +45,10 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeC restRequest.content(), restRequest.getXContentType() ); - return channel -> client.execute(PutQueryRulesetAction.INSTANCE, request, new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(PutQueryRulesetAction.Response response) { - return response.status(); - } - }); + return channel -> client.execute( + PutQueryRulesetAction.INSTANCE, + request, + new RestToXContentListener<>(channel, PutQueryRulesetAction.Response::status, r -> null) + ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 08b1e4f90c419..51b55a0a1c033 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.delete.DeleteResponse; import 
org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; @@ -226,7 +225,7 @@ private static String getSearchAliasName(SearchApplication app) { * @param create If true, the search application must not already exist * @param listener The action listener to invoke on response/failure. */ - public void putSearchApplication(SearchApplication app, boolean create, ActionListener listener) { + public void putSearchApplication(SearchApplication app, boolean create, ActionListener listener) { createOrUpdateAlias(app, new ActionListener<>() { @Override public void onResponse(AcknowledgedResponse acknowledgedResponse) { @@ -283,7 +282,7 @@ private IndicesAliasesRequestBuilder updateAliasIndices(Set currentAlias return aliasesRequestBuilder; } - private void updateSearchApplication(SearchApplication app, boolean create, ActionListener listener) { + private void updateSearchApplication(SearchApplication app, boolean create, ActionListener listener) { try (ReleasableBytesStreamOutput buffer = new ReleasableBytesStreamOutput(0, bigArrays.withCircuitBreaking())) { try (XContentBuilder source = XContentFactory.jsonBuilder(buffer)) { source.startObject() diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationAction.java index 6b7588730e058..f44fa2a7b67e8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/ListSearchApplicationAction.java @@ -15,9 +15,7 @@ import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.core.Nullable; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -124,7 +122,7 @@ public static Request parse(XContentParser parser) { } } - public static class Response extends ActionResponse implements StatusToXContentObject { + public static class Response extends ActionResponse implements ToXContentObject { public static final ParseField RESULT_FIELD = new ParseField("results"); @@ -149,11 +147,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return queryPage.toXContent(builder, params); } - @Override - public RestStatus status() { - return RestStatus.OK; - } - public QueryPage queryPage() { return queryPage; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java index 439b7c6633136..78ada27534892 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; 
@@ -133,7 +132,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - public static class Response extends ActionResponse implements StatusToXContentObject { + public static class Response extends ActionResponse implements ToXContentObject { final DocWriteResponse.Result result; @@ -159,7 +158,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - @Override public RestStatus status() { return switch (result) { case CREATED -> RestStatus.CREATED; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestPutSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestPutSearchApplicationAction.java index f88f24272ef14..cd84c85bc14e8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestPutSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestPutSearchApplicationAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; @@ -47,11 +46,10 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeC restRequest.content(), restRequest.getXContentType() ); - return channel -> client.execute(PutSearchApplicationAction.INSTANCE, request, new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(PutSearchApplicationAction.Response response) { - return response.status(); - } - }); + return channel -> client.execute( + PutSearchApplicationAction.INSTANCE, + request, + new 
RestToXContentListener<>(channel, PutSearchApplicationAction.Response::status, r -> null) + ); } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java index 1e8fa5953606b..2c18a866d684a 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java @@ -9,8 +9,8 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -75,7 +75,7 @@ public void testUpdateQueryRuleset() throws Exception { Map.of("ids", List.of("id1", "id2")) ); final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", Collections.singletonList(myQueryRule1)); - IndexResponse resp = awaitPutQueryRuleset(myQueryRuleset); + DocWriteResponse resp = awaitPutQueryRuleset(myQueryRuleset); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); assertThat(resp.getIndex(), equalTo(QUERY_RULES_CONCRETE_INDEX_NAME)); @@ -96,7 +96,7 @@ public void testUpdateQueryRuleset() throws Exception { Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id3"), Map.of("_index", "my_index2", "_id", "id4"))) ); final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", List.of(myQueryRule1, myQueryRule2)); - IndexResponse newResp = awaitPutQueryRuleset(myQueryRuleset); + DocWriteResponse newResp = awaitPutQueryRuleset(myQueryRuleset); 
assertThat(newResp.status(), equalTo(RestStatus.OK)); assertThat(newResp.getIndex(), equalTo(QUERY_RULES_CONCRETE_INDEX_NAME)); QueryRuleset getQueryRuleset = awaitGetQueryRuleset(myQueryRuleset.id()); @@ -128,7 +128,7 @@ public void testListQueryRulesets() throws Exception { ); final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset_" + i, rules); - IndexResponse resp = awaitPutQueryRuleset(myQueryRuleset); + DocWriteResponse resp = awaitPutQueryRuleset(myQueryRuleset); assertThat(resp.status(), equalTo(RestStatus.CREATED)); assertThat(resp.getIndex(), equalTo(QUERY_RULES_CONCRETE_INDEX_NAME)); } @@ -182,7 +182,7 @@ public void testDeleteQueryRuleset() throws Exception { Map.of("ids", List.of("id3", "id4")) ); final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", List.of(myQueryRule1, myQueryRule2)); - IndexResponse resp = awaitPutQueryRuleset(myQueryRuleset); + DocWriteResponse resp = awaitPutQueryRuleset(myQueryRuleset); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); assertThat(resp.getIndex(), equalTo(QUERY_RULES_CONCRETE_INDEX_NAME)); @@ -195,13 +195,13 @@ public void testDeleteQueryRuleset() throws Exception { expectThrows(ResourceNotFoundException.class, () -> awaitGetQueryRuleset("my_ruleset")); } - private IndexResponse awaitPutQueryRuleset(QueryRuleset queryRuleset) throws Exception { + private DocWriteResponse awaitPutQueryRuleset(QueryRuleset queryRuleset) throws Exception { CountDownLatch latch = new CountDownLatch(1); - final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); queryRulesIndexService.putQueryRuleset(queryRuleset, new ActionListener<>() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { resp.set(indexResponse); latch.countDown(); } diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexServiceTests.java index 2bf35ef88ebc0..7891f5773d1a8 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexServiceTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -78,7 +78,7 @@ public void testCreateSearchApplication() throws Exception { null ); - IndexResponse resp = awaitPutSearchApplication(searchApp, true); + DocWriteResponse resp = awaitPutSearchApplication(searchApp, true); assertThat(resp.status(), equalTo(RestStatus.CREATED)); assertThat(resp.getIndex(), equalTo(SEARCH_APPLICATION_CONCRETE_INDEX_NAME)); @@ -99,7 +99,7 @@ public void testCreateSearchApplication() throws Exception { SearchApplicationTemplate.DEFAULT_TEMPLATE ); - IndexResponse resp2 = awaitPutSearchApplication(searchApp2, true); + DocWriteResponse resp2 = awaitPutSearchApplication(searchApp2, true); assertThat(resp2.status(), equalTo(RestStatus.CREATED)); assertThat(resp2.getIndex(), equalTo(SEARCH_APPLICATION_CONCRETE_INDEX_NAME)); @@ -133,7 +133,7 @@ public void testUpdateSearchApplication() throws Exception { System.currentTimeMillis(), SearchApplicationTestUtils.getRandomSearchApplicationTemplate() ); - 
IndexResponse resp = awaitPutSearchApplication(searchApp, false); + DocWriteResponse resp = awaitPutSearchApplication(searchApp, false); assertThat(resp.status(), equalTo(RestStatus.CREATED)); assertThat(resp.getIndex(), equalTo(SEARCH_APPLICATION_CONCRETE_INDEX_NAME)); @@ -148,7 +148,7 @@ public void testUpdateSearchApplication() throws Exception { System.currentTimeMillis(), SearchApplicationTestUtils.getRandomSearchApplicationTemplate() ); - IndexResponse newResp = awaitPutSearchApplication(searchApp, false); + DocWriteResponse newResp = awaitPutSearchApplication(searchApp, false); assertThat(newResp.status(), equalTo(RestStatus.OK)); assertThat(newResp.getIndex(), equalTo(SEARCH_APPLICATION_CONCRETE_INDEX_NAME)); SearchApplication getNewSearchApp = awaitGetSearchApplication(searchApp.name()); @@ -166,7 +166,7 @@ public void testListSearchApplication() throws Exception { System.currentTimeMillis(), null ); - IndexResponse resp = awaitPutSearchApplication(searchApp, false); + DocWriteResponse resp = awaitPutSearchApplication(searchApp, false); assertThat(resp.status(), equalTo(RestStatus.CREATED)); assertThat(resp.getIndex(), equalTo(SEARCH_APPLICATION_CONCRETE_INDEX_NAME)); } @@ -208,7 +208,7 @@ public void testListSearchApplicationWithQuery() throws Exception { System.currentTimeMillis(), null ); - IndexResponse resp = awaitPutSearchApplication(app, false); + DocWriteResponse resp = awaitPutSearchApplication(app, false); assertThat(resp.status(), equalTo(RestStatus.CREATED)); assertThat(resp.getIndex(), equalTo(SEARCH_APPLICATION_CONCRETE_INDEX_NAME)); } @@ -234,7 +234,7 @@ public void testDeleteSearchApplication() throws Exception { System.currentTimeMillis(), null ); - IndexResponse resp = awaitPutSearchApplication(app, false); + DocWriteResponse resp = awaitPutSearchApplication(app, false); assertThat(resp.status(), equalTo(RestStatus.CREATED)); assertThat(resp.getIndex(), equalTo(SEARCH_APPLICATION_CONCRETE_INDEX_NAME)); @@ -262,13 +262,13 @@ public void 
testDeleteSearchApplication() throws Exception { } } - private IndexResponse awaitPutSearchApplication(SearchApplication app, boolean create) throws Exception { + private DocWriteResponse awaitPutSearchApplication(SearchApplication app, boolean create) throws Exception { CountDownLatch latch = new CountDownLatch(1); - final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); searchAppService.putSearchApplication(app, create, new ActionListener<>() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { resp.set(indexResponse); latch.countDown(); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlGetAsyncStatusAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlGetAsyncStatusAction.java index 2059bf16e77e8..ee0129f025a07 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlGetAsyncStatusAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlGetAsyncStatusAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.async.GetAsyncStatusRequest; import java.util.List; @@ -33,6 +33,6 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { GetAsyncStatusRequest statusRequest = new GetAsyncStatusRequest(request.param("id")); - return channel -> client.execute(EqlAsyncGetStatusAction.INSTANCE, statusRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute(EqlAsyncGetStatusAction.INSTANCE, 
statusRequest, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java index af42d94c236f2..d5e38127cdec7 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java @@ -13,8 +13,6 @@ import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; -import java.util.BitSet; - import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; @@ -23,18 +21,13 @@ import static org.elasticsearch.compute.gen.Methods.appendMethod; import static org.elasticsearch.compute.gen.Methods.getMethod; import static org.elasticsearch.compute.gen.Types.ABSTRACT_CONVERT_FUNCTION_EVALUATOR; -import static org.elasticsearch.compute.gen.Types.BIG_ARRAYS; import static org.elasticsearch.compute.gen.Types.BLOCK; import static org.elasticsearch.compute.gen.Types.BYTES_REF; -import static org.elasticsearch.compute.gen.Types.BYTES_REF_ARRAY; -import static org.elasticsearch.compute.gen.Types.BYTES_REF_BLOCK; +import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; import static org.elasticsearch.compute.gen.Types.SOURCE; import static org.elasticsearch.compute.gen.Types.VECTOR; -import static org.elasticsearch.compute.gen.Types.arrayBlockType; -import static org.elasticsearch.compute.gen.Types.arrayVectorType; import static org.elasticsearch.compute.gen.Types.blockType; -import static org.elasticsearch.compute.gen.Types.constantVectorType; import static org.elasticsearch.compute.gen.Types.vectorType; public class ConvertEvaluatorImplementer 
{ @@ -79,6 +72,8 @@ private TypeSpec type() { builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.superclass(ABSTRACT_CONVERT_FUNCTION_EVALUATOR); + builder.addField(DRIVER_CONTEXT, "driverContext", Modifier.PRIVATE, Modifier.FINAL); + builder.addMethod(ctor()); builder.addMethod(name()); builder.addMethod(evalVector()); @@ -92,7 +87,9 @@ private MethodSpec ctor() { MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); builder.addParameter(EXPRESSION_EVALUATOR, "field"); builder.addParameter(SOURCE, "source"); + builder.addParameter(DRIVER_CONTEXT, "driverContext"); builder.addStatement("super($N, $N)", "field", "source"); + builder.addStatement("this.driverContext = driverContext"); return builder.build(); } @@ -121,9 +118,9 @@ private MethodSpec evalVector() { { builder.beginControlFlow("try"); { - var constVectType = constantVectorType(resultType); + var constVectType = blockType(resultType); builder.addStatement( - "return new $T($N, positionCount).asBlock()", + "return driverContext.blockFactory().newConstant$TWith($N, positionCount)", constVectType, evalValueCall("vector", "0", scratchPadName) ); @@ -131,59 +128,34 @@ private MethodSpec evalVector() { builder.nextControlFlow("catch (Exception e)"); { builder.addStatement("registerException(e)"); - builder.addStatement("return Block.constantNullBlock(positionCount)"); + builder.addStatement("return Block.constantNullBlock(positionCount, driverContext.blockFactory())"); } builder.endControlFlow(); } builder.endControlFlow(); - builder.addStatement("$T nullsMask = null", BitSet.class); - if (resultType.equals(BYTES_REF)) { - builder.addStatement( - "$T values = new $T(positionCount, $T.NON_RECYCLING_INSTANCE)", // TODO: see note in MvEvaluatorImplementer - BYTES_REF_ARRAY, - BYTES_REF_ARRAY, - BIG_ARRAYS - ); - } else { - builder.addStatement("$T[] values = new $T[positionCount]", resultType, resultType); - } + ClassName returnBlockType = blockType(resultType); + 
builder.addStatement( + "$T.Builder builder = $T.newBlockBuilder(positionCount, driverContext.blockFactory())", + returnBlockType, + returnBlockType + ); builder.beginControlFlow("for (int p = 0; p < positionCount; p++)"); { builder.beginControlFlow("try"); { - if (resultType.equals(BYTES_REF)) { - builder.addStatement("values.append($N)", evalValueCall("vector", "p", scratchPadName)); - } else { - builder.addStatement("values[p] = $N", evalValueCall("vector", "p", scratchPadName)); - } + builder.addStatement("builder.$L($N)", appendMethod(resultType), evalValueCall("vector", "p", scratchPadName)); } builder.nextControlFlow("catch (Exception e)"); { builder.addStatement("registerException(e)"); - builder.beginControlFlow("if (nullsMask == null)"); - { - builder.addStatement("nullsMask = new BitSet(positionCount)"); - } - builder.endControlFlow(); - builder.addStatement("nullsMask.set(p)"); - if (resultType.equals(BYTES_REF)) { - builder.addStatement("values.append($T.NULL_VALUE)", BYTES_REF_BLOCK); - } + builder.addStatement("builder.appendNull()"); } builder.endControlFlow(); } builder.endControlFlow(); - builder.addStatement( - """ - return nullsMask == null - ? 
new $T(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new $T(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED)""", - arrayVectorType(resultType), - arrayBlockType(resultType) - ); + builder.addStatement("return builder.build()"); return builder.build(); } @@ -196,7 +168,11 @@ private MethodSpec evalBlock() { builder.addStatement("$T block = ($T) b", blockType, blockType); builder.addStatement("int positionCount = block.getPositionCount()"); TypeName resultBlockType = blockType(resultType); - builder.addStatement("$T.Builder builder = $T.newBlockBuilder(positionCount)", resultBlockType, resultBlockType); + builder.addStatement( + "$T.Builder builder = $T.newBlockBuilder(positionCount, driverContext.blockFactory())", + resultBlockType, + resultBlockType + ); String scratchPadName = null; if (argumentType.equals(BYTES_REF)) { scratchPadName = "scratchPad"; diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index ecd59edb03286..713ce07b7f7ab 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -32,11 +32,13 @@ import static org.elasticsearch.compute.gen.Methods.appendMethod; import static org.elasticsearch.compute.gen.Methods.getMethod; -import static org.elasticsearch.compute.gen.Types.BLOCK; +import static org.elasticsearch.compute.gen.Types.BLOCK_REF; import static org.elasticsearch.compute.gen.Types.BYTES_REF; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; import static org.elasticsearch.compute.gen.Types.PAGE; +import static 
org.elasticsearch.compute.gen.Types.RELEASABLE; +import static org.elasticsearch.compute.gen.Types.RELEASABLES; import static org.elasticsearch.compute.gen.Types.SOURCE; import static org.elasticsearch.compute.gen.Types.WARNINGS; import static org.elasticsearch.compute.gen.Types.blockType; @@ -111,17 +113,18 @@ private MethodSpec ctor() { private MethodSpec eval() { MethodSpec.Builder builder = MethodSpec.methodBuilder("eval").addAnnotation(Override.class); - builder.addModifiers(Modifier.PUBLIC).returns(BLOCK).addParameter(PAGE, "page"); + builder.addModifiers(Modifier.PUBLIC).returns(BLOCK_REF).addParameter(PAGE, "page"); processFunction.args.stream().forEach(a -> a.evalToBlock(builder)); String invokeBlockEval = invokeRealEval(true); processFunction.args.stream().forEach(a -> a.resolveVectors(builder, invokeBlockEval)); builder.addStatement(invokeRealEval(false)); + processFunction.args.stream().forEach(a -> a.closeEvalToBlock(builder)); return builder.build(); } private String invokeRealEval(boolean blockStyle) { - StringBuilder builder = new StringBuilder("return eval(page.getPositionCount()"); + StringBuilder builder = new StringBuilder("return Block.Ref.floating(eval(page.getPositionCount()"); String params = processFunction.args.stream() .map(a -> a.paramName(blockStyle)) .filter(a -> a != null) @@ -134,6 +137,7 @@ private String invokeRealEval(boolean blockStyle) { if (processFunction.resultDataType(blockStyle).simpleName().endsWith("Vector")) { builder.append(".asBlock()"); } + builder.append(")"); return builder.toString(); } @@ -264,11 +268,17 @@ private interface ProcessFunctionArg { void implementCtor(MethodSpec.Builder builder); /** - * Emits code to evaluate this parameter to a Block or array of Blocks. - * Noop if the parameter is {@link Fixed}. + * Emits code to evaluate this parameter to a Block.Ref or array of Block.Refs + * and begins a {@code try} block for those refs. Noop if the parameter is {@link Fixed}. 
*/ void evalToBlock(MethodSpec.Builder builder); + /** + * Closes the {@code try} block emitted by {@link #evalToBlock} if it made one. + * Noop otherwise. + */ + void closeEvalToBlock(MethodSpec.Builder builder); + /** * Emits code to check if this parameter is a vector or a block, and to * call the block flavored evaluator if this is a block. Noop if the @@ -336,11 +346,16 @@ public void implementCtor(MethodSpec.Builder builder) { @Override public void evalToBlock(MethodSpec.Builder builder) { TypeName blockType = blockType(type); - builder.addStatement("Block $LUncastBlock = $L.eval(page)", name, name); - builder.beginControlFlow("if ($LUncastBlock.areAllValuesNull())", name); - builder.addStatement("return Block.constantNullBlock(page.getPositionCount())"); + builder.beginControlFlow("try (Block.Ref $LRef = $L.eval(page))", name, name); + builder.beginControlFlow("if ($LRef.block().areAllValuesNull())", name); + builder.addStatement("return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount()))"); + builder.endControlFlow(); + builder.addStatement("$T $LBlock = ($T) $LRef.block()", blockType, name, blockType, name); + } + + @Override + public void closeEvalToBlock(MethodSpec.Builder builder) { builder.endControlFlow(); - builder.addStatement("$T $LBlock = ($T) $LUncastBlock", blockType, name, blockType, name); } @Override @@ -432,18 +447,26 @@ public void implementCtor(MethodSpec.Builder builder) { @Override public void evalToBlock(MethodSpec.Builder builder) { TypeName blockType = blockType(componentType); + builder.addStatement("Block.Ref[] $LRefs = new Block.Ref[$L.length]", name, name); + builder.beginControlFlow("try ($T $LRelease = $T.wrap($LRefs))", RELEASABLE, name, RELEASABLES, name); builder.addStatement("$T[] $LBlocks = new $T[$L.length]", blockType, name, blockType, name); builder.beginControlFlow("for (int i = 0; i < $LBlocks.length; i++)", name); { - builder.addStatement("Block block = $L[i].eval(page)", name); + 
builder.addStatement("$LRefs[i] = $L[i].eval(page)", name, name); + builder.addStatement("Block block = $LRefs[i].block()", name); builder.beginControlFlow("if (block.areAllValuesNull())"); - builder.addStatement("return Block.constantNullBlock(page.getPositionCount())"); + builder.addStatement("return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount()))"); builder.endControlFlow(); builder.addStatement("$LBlocks[i] = ($T) block", name, blockType); } builder.endControlFlow(); } + @Override + public void closeEvalToBlock(MethodSpec.Builder builder) { + builder.endControlFlow(); + } + @Override public void resolveVectors(MethodSpec.Builder builder, String invokeBlockEval) { TypeName vectorType = vectorType(componentType); @@ -541,6 +564,11 @@ public void evalToBlock(MethodSpec.Builder builder) { // nothing to do } + @Override + public void closeEvalToBlock(MethodSpec.Builder builder) { + // nothing to do + } + @Override public void resolveVectors(MethodSpec.Builder builder, String invokeBlockEval) { // nothing to do @@ -609,6 +637,11 @@ public void evalToBlock(MethodSpec.Builder builder) { // nothing to do } + @Override + public void closeEvalToBlock(MethodSpec.Builder builder) { + // nothing to do + } + @Override public void resolveVectors(MethodSpec.Builder builder, String invokeBlockEval) { // nothing to do diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java index e32cee86dc6e1..a6668795c592a 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java @@ -26,6 +26,7 @@ import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.TypeElement; import javax.lang.model.type.TypeMirror; +import javax.tools.Diagnostic; 
/** * Glues the {@link EvaluatorImplementer} into the jdk's annotation @@ -70,48 +71,63 @@ public boolean process(Set set, RoundEnvironment roundEnv for (Element evaluatorMethod : roundEnvironment.getElementsAnnotatedWith(ann)) { Evaluator evaluatorAnn = evaluatorMethod.getAnnotation(Evaluator.class); if (evaluatorAnn != null) { - AggregatorProcessor.write( - evaluatorMethod, - "evaluator", - new EvaluatorImplementer( - env.getElementUtils(), - env.getTypeUtils(), - (ExecutableElement) evaluatorMethod, - evaluatorAnn.extraName(), - warnExceptions(evaluatorMethod) - ).sourceFile(), - env - ); + try { + AggregatorProcessor.write( + evaluatorMethod, + "evaluator", + new EvaluatorImplementer( + env.getElementUtils(), + env.getTypeUtils(), + (ExecutableElement) evaluatorMethod, + evaluatorAnn.extraName(), + warnExceptions(evaluatorMethod) + ).sourceFile(), + env + ); + } catch (Exception e) { + env.getMessager().printMessage(Diagnostic.Kind.ERROR, "failed to build " + evaluatorMethod.getEnclosingElement()); + throw e; + } } MvEvaluator mvEvaluatorAnn = evaluatorMethod.getAnnotation(MvEvaluator.class); if (mvEvaluatorAnn != null) { - AggregatorProcessor.write( - evaluatorMethod, - "evaluator", - new MvEvaluatorImplementer( - env.getElementUtils(), - (ExecutableElement) evaluatorMethod, - mvEvaluatorAnn.extraName(), - mvEvaluatorAnn.finish(), - mvEvaluatorAnn.single(), - mvEvaluatorAnn.ascending(), - warnExceptions(evaluatorMethod) - ).sourceFile(), - env - ); + try { + AggregatorProcessor.write( + evaluatorMethod, + "evaluator", + new MvEvaluatorImplementer( + env.getElementUtils(), + (ExecutableElement) evaluatorMethod, + mvEvaluatorAnn.extraName(), + mvEvaluatorAnn.finish(), + mvEvaluatorAnn.single(), + mvEvaluatorAnn.ascending(), + warnExceptions(evaluatorMethod) + ).sourceFile(), + env + ); + } catch (Exception e) { + env.getMessager().printMessage(Diagnostic.Kind.ERROR, "failed to build " + evaluatorMethod.getEnclosingElement()); + throw e; + } } ConvertEvaluator 
convertEvaluatorAnn = evaluatorMethod.getAnnotation(ConvertEvaluator.class); if (convertEvaluatorAnn != null) { - AggregatorProcessor.write( - evaluatorMethod, - "evaluator", - new ConvertEvaluatorImplementer( - env.getElementUtils(), - (ExecutableElement) evaluatorMethod, - convertEvaluatorAnn.extraName() - ).sourceFile(), - env - ); + try { + AggregatorProcessor.write( + evaluatorMethod, + "evaluator", + new ConvertEvaluatorImplementer( + env.getElementUtils(), + (ExecutableElement) evaluatorMethod, + convertEvaluatorAnn.extraName() + ).sourceFile(), + env + ); + } catch (Exception e) { + env.getMessager().printMessage(Diagnostic.Kind.ERROR, "failed to build " + evaluatorMethod.getEnclosingElement()); + throw e; + } } } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java index 86ae6d3f46789..ce14d9ac31d49 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java @@ -31,12 +31,11 @@ import static org.elasticsearch.compute.gen.Methods.getMethod; import static org.elasticsearch.compute.gen.Types.ABSTRACT_MULTIVALUE_FUNCTION_EVALUATOR; import static org.elasticsearch.compute.gen.Types.ABSTRACT_NULLABLE_MULTIVALUE_FUNCTION_EVALUATOR; -import static org.elasticsearch.compute.gen.Types.BLOCK; +import static org.elasticsearch.compute.gen.Types.BLOCK_REF; import static org.elasticsearch.compute.gen.Types.BYTES_REF; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; import static org.elasticsearch.compute.gen.Types.SOURCE; -import static org.elasticsearch.compute.gen.Types.VECTOR; import static org.elasticsearch.compute.gen.Types.WARNINGS; import static 
org.elasticsearch.compute.gen.Types.blockType; import static org.elasticsearch.compute.gen.Types.vectorType; @@ -180,7 +179,7 @@ private MethodSpec evalShell( Consumer body ) { MethodSpec.Builder builder = MethodSpec.methodBuilder(name); - builder.returns(nullable ? BLOCK : VECTOR).addParameter(BLOCK, "fieldVal"); + builder.returns(BLOCK_REF).addParameter(BLOCK_REF, "ref"); if (override) { builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); } else { @@ -191,7 +190,8 @@ private MethodSpec evalShell( preflight.accept(builder); - builder.addStatement("$T v = ($T) fieldVal", blockType, blockType); + builder.beginControlFlow("try (ref)"); + builder.addStatement("$T v = ($T) ref.block()", blockType, blockType); builder.addStatement("int positionCount = v.getPositionCount()"); if (nullable) { TypeName resultBlockType = blockType(resultType); @@ -251,7 +251,8 @@ private MethodSpec evalShell( } builder.endControlFlow(); - builder.addStatement("return builder.build()"); + builder.addStatement("return Block.Ref.floating(builder.build()$L)", nullable ? 
"" : ".asBlock()"); + builder.endControlFlow(); return builder.build(); } @@ -261,8 +262,8 @@ private MethodSpec eval(String name, boolean nullable) { if (ascendingFunction == null) { return; } - builder.beginControlFlow("if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING)"); - builder.addStatement("return $L(fieldVal)", name.replace("eval", "evalAscending")); + builder.beginControlFlow("if (ref.block().mvSortedAscending())"); + builder.addStatement("return $L(ref)", name.replace("eval", "evalAscending")); builder.endControlFlow(); }, builder -> { builder.addStatement("int first = v.getFirstValueIndex(p)"); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java index dc59ecf757d20..46fb6af22e79b 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java @@ -32,6 +32,7 @@ public class Types { static final ClassName PAGE = ClassName.get(DATA_PACKAGE, "Page"); static final ClassName BLOCK = ClassName.get(DATA_PACKAGE, "Block"); static final TypeName BLOCK_ARRAY = ArrayTypeName.of(BLOCK); + static final ClassName BLOCK_REF = ClassName.get(DATA_PACKAGE, "Block", "Ref"); static final ClassName VECTOR = ClassName.get(DATA_PACKAGE, "Vector"); static final ClassName BIG_ARRAYS = ClassName.get("org.elasticsearch.common.util", "BigArrays"); @@ -117,6 +118,7 @@ public class Types { static final ClassName BYTES_REF = ClassName.get("org.apache.lucene.util", "BytesRef"); + static final ClassName RELEASABLE = ClassName.get("org.elasticsearch.core", "Releasable"); static final ClassName RELEASABLES = ClassName.get("org.elasticsearch.core", "Releasables"); static ClassName blockType(TypeName elementType) { diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index d8a5e471aaf84..b6e36e698355b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -75,6 +75,7 @@ public BooleanBlock expand() { public static long ramBytesEstimated(boolean[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index a7d397fcfb98e..b52d8cf19b3d2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class BooleanBlockBuilder extends AbstractBlockBuilder implements BooleanB BooleanBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new boolean[initialSize]; } @@ -181,19 +183,18 @@ public BooleanBlock build() { finish(); BooleanBlock block; if 
(hasNonNullValue && positionCount == 1 && valueCount == 1) { - block = new ConstantBooleanVector(values[0], 1, blockFactory).asBlock(); + block = blockFactory.newConstantBooleanBlockWith(values[0], 1, estimatedBytes); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } if (isDense() && singleValued()) { - block = new BooleanArrayVector(values, positionCount, blockFactory).asBlock(); + block = blockFactory.newBooleanArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - block = new BooleanArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + block = blockFactory.newBooleanArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, estimatedBytes); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java index 45c74ee6e06d4..effb90267702f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java @@ -49,17 +49,17 @@ protected void growValuesArray(int newSize) { @Override public BooleanVector build() { + finish(); BooleanVector vector; if (valueCount == 1) { - vector = new ConstantBooleanVector(values[0], 1, blockFactory); + vector = blockFactory.newConstantBooleanBlockWith(values[0], 1, estimatedBytes).asVector(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } - vector = new BooleanArrayVector(values, valueCount, blockFactory); + vector 
= blockFactory.newBooleanArrayVector(values, valueCount, estimatedBytes); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java index 30146d4e55c02..9a07816666efe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java @@ -18,6 +18,7 @@ final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { private final BlockFactory blockFactory; private final boolean[] values; + private final long preAdjustedBytes; /** * The next value to write into. {@code -1} means the vector has already * been built. 
@@ -25,7 +26,8 @@ final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { private int nextIndex; BooleanVectorFixedBuilder(int size, BlockFactory blockFactory) { - blockFactory.adjustBreaker(ramBytesUsed(size), false); + preAdjustedBytes = ramBytesUsed(size); + blockFactory.adjustBreaker(preAdjustedBytes, false); this.blockFactory = blockFactory; this.values = new boolean[size]; } @@ -54,8 +56,16 @@ public BooleanVector build() { } nextIndex = -1; if (values.length == 1) { - return new ConstantBooleanVector(values[0], 1, blockFactory); + return blockFactory.newConstantBooleanBlockWith(values[0], 1, preAdjustedBytes).asVector(); + } + return blockFactory.newBooleanArrayVector(values, values.length, preAdjustedBytes); + } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-preAdjustedBytes, false); } - return new BooleanArrayVector(values, values.length, blockFactory); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index e4ee70cd27a47..db5b5d3fcf804 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -77,6 +77,7 @@ public BytesRefBlock expand() { public static long ramBytesEstimated(BytesRefArray values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override @@ -115,7 +116,7 @@ public void close() { throw new IllegalStateException("can't release 
already released block [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-(ramBytesUsed() - values.ramBytesUsed()), true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index c8f5276a99db5..1692bfc59358a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -85,7 +85,7 @@ public String toString() { @Override public void close() { - blockFactory.adjustBreaker(-BASE_RAM_BYTES_USED, true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 23c18d2a9ca6e..a60b26667eb79 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -193,19 +193,42 @@ public BytesRefBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { public BytesRefBlock build() { finish(); BytesRefBlock block; + assert estimatedBytes == 0 || firstValueIndexes != null; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { block = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + /* + * Update the breaker with the actual bytes used. 
+ * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); Releasables.closeExpectNoException(values); } else { - estimatedBytes += values.ramBytesUsed(); if (isDense() && singleValued()) { block = new BytesRefArrayVector(values, positionCount, blockFactory).asBlock(); } else { block = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); } - // update the breaker with the actual bytes used. 
- blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + values = null; + built(); return block; } + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java index f37ffb2a7e28a..5ea9a2b7d0184 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java @@ -54,16 +54,40 @@ protected void growValuesArray(int newSize) { @Override public BytesRefVector build() { + finish(); BytesRefVector vector; + assert estimatedBytes == 0; if (valueCount == 1) { vector = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed(), false); Releasables.closeExpectNoException(values); } else { - estimatedBytes = values.ramBytesUsed(); vector = new BytesRefArrayVector(values, valueCount, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. 
The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - values.bigArraysRamBytesUsed(), false); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + values = null; + built(); return vector; } + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index b0de974a85c24..675952a8d6a85 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -75,6 +75,7 @@ public DoubleBlock expand() { public static long ramBytesEstimated(double[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index a97f58f3924b1..0267f07f20c7a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import 
org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class DoubleBlockBuilder extends AbstractBlockBuilder implements DoubleBlo DoubleBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new double[initialSize]; } @@ -181,19 +183,18 @@ public DoubleBlock build() { finish(); DoubleBlock block; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { - block = new ConstantDoubleVector(values[0], 1, blockFactory).asBlock(); + block = blockFactory.newConstantDoubleBlockWith(values[0], 1, estimatedBytes); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } if (isDense() && singleValued()) { - block = new DoubleArrayVector(values, positionCount, blockFactory).asBlock(); + block = blockFactory.newDoubleArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - block = new DoubleArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + block = blockFactory.newDoubleArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, estimatedBytes); } } - // update the breaker with the actual bytes used. 
- blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java index f92ec67aec012..f4e7be406e1ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java @@ -49,17 +49,17 @@ protected void growValuesArray(int newSize) { @Override public DoubleVector build() { + finish(); DoubleVector vector; if (valueCount == 1) { - vector = new ConstantDoubleVector(values[0], 1, blockFactory); + vector = blockFactory.newConstantDoubleBlockWith(values[0], 1, estimatedBytes).asVector(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } - vector = new DoubleArrayVector(values, valueCount, blockFactory); + vector = blockFactory.newDoubleArrayVector(values, valueCount, estimatedBytes); } - // update the breaker with the actual bytes used. 
- blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java index 83992ed71b720..57a423985b0ce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java @@ -18,6 +18,7 @@ final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { private final BlockFactory blockFactory; private final double[] values; + private final long preAdjustedBytes; /** * The next value to write into. {@code -1} means the vector has already * been built. @@ -25,7 +26,8 @@ final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { private int nextIndex; DoubleVectorFixedBuilder(int size, BlockFactory blockFactory) { - blockFactory.adjustBreaker(ramBytesUsed(size), false); + preAdjustedBytes = ramBytesUsed(size); + blockFactory.adjustBreaker(preAdjustedBytes, false); this.blockFactory = blockFactory; this.values = new double[size]; } @@ -54,8 +56,16 @@ public DoubleVector build() { } nextIndex = -1; if (values.length == 1) { - return new ConstantDoubleVector(values[0], 1, blockFactory); + return blockFactory.newConstantDoubleBlockWith(values[0], 1, preAdjustedBytes).asVector(); + } + return blockFactory.newDoubleArrayVector(values, values.length, preAdjustedBytes); + } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-preAdjustedBytes, false); } - return new DoubleArrayVector(values, values.length, blockFactory); } } diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 7a345941df019..4170009b89ab2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -75,6 +75,7 @@ public IntBlock expand() { public static long ramBytesEstimated(int[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index 53d379d715c9b..98fce58fbfbfc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class IntBlockBuilder extends AbstractBlockBuilder implements IntBlock.Bui IntBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new int[initialSize]; } @@ -181,19 +183,18 @@ public IntBlock build() { finish(); IntBlock block; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { 
- block = new ConstantIntVector(values[0], 1, blockFactory).asBlock(); + block = blockFactory.newConstantIntBlockWith(values[0], 1, estimatedBytes); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } if (isDense() && singleValued()) { - block = new IntArrayVector(values, positionCount, blockFactory).asBlock(); + block = blockFactory.newIntArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - block = new IntArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + block = blockFactory.newIntArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, estimatedBytes); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java index 0533d5463a4e7..09bbb32cefe79 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java @@ -49,17 +49,17 @@ protected void growValuesArray(int newSize) { @Override public IntVector build() { + finish(); IntVector vector; if (valueCount == 1) { - vector = new ConstantIntVector(values[0], 1, blockFactory); + vector = blockFactory.newConstantIntBlockWith(values[0], 1, estimatedBytes).asVector(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } - vector = new IntArrayVector(values, valueCount, blockFactory); + vector = blockFactory.newIntArrayVector(values, valueCount, estimatedBytes); } - // update the breaker with the actual bytes 
used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java index 19303b4024869..98565a6b94d4b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java @@ -18,6 +18,7 @@ final class IntVectorFixedBuilder implements IntVector.FixedBuilder { private final BlockFactory blockFactory; private final int[] values; + private final long preAdjustedBytes; /** * The next value to write into. {@code -1} means the vector has already * been built. @@ -25,7 +26,8 @@ final class IntVectorFixedBuilder implements IntVector.FixedBuilder { private int nextIndex; IntVectorFixedBuilder(int size, BlockFactory blockFactory) { - blockFactory.adjustBreaker(ramBytesUsed(size), false); + preAdjustedBytes = ramBytesUsed(size); + blockFactory.adjustBreaker(preAdjustedBytes, false); this.blockFactory = blockFactory; this.values = new int[size]; } @@ -54,8 +56,16 @@ public IntVector build() { } nextIndex = -1; if (values.length == 1) { - return new ConstantIntVector(values[0], 1, blockFactory); + return blockFactory.newConstantIntBlockWith(values[0], 1, preAdjustedBytes).asVector(); + } + return blockFactory.newIntArrayVector(values, values.length, preAdjustedBytes); + } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-preAdjustedBytes, false); } - return new IntArrayVector(values, values.length, blockFactory); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index 21c6b445cd37d..778ec4294180c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -75,6 +75,7 @@ public LongBlock expand() { public static long ramBytesEstimated(long[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index a378b382ce31e..f2eff13562e1a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class LongBlockBuilder extends AbstractBlockBuilder implements LongBlock.B LongBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new long[initialSize]; } @@ -181,19 +183,18 @@ public LongBlock build() { finish(); LongBlock block; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { - block = new ConstantLongVector(values[0], 1, blockFactory).asBlock(); + block = 
blockFactory.newConstantLongBlockWith(values[0], 1, estimatedBytes); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } if (isDense() && singleValued()) { - block = new LongArrayVector(values, positionCount, blockFactory).asBlock(); + block = blockFactory.newLongArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - block = new LongArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + block = blockFactory.newLongArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, estimatedBytes); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java index 6b2e9f1de7d51..eb4e54781a020 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java @@ -49,17 +49,17 @@ protected void growValuesArray(int newSize) { @Override public LongVector build() { + finish(); LongVector vector; if (valueCount == 1) { - vector = new ConstantLongVector(values[0], 1, blockFactory); + vector = blockFactory.newConstantLongBlockWith(values[0], 1, estimatedBytes).asVector(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } - vector = new LongArrayVector(values, valueCount, blockFactory); + vector = blockFactory.newLongArrayVector(values, valueCount, estimatedBytes); } - // update the breaker with the actual bytes used. 
- blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java index 5414b7669f588..0849cbc760847 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java @@ -18,6 +18,7 @@ final class LongVectorFixedBuilder implements LongVector.FixedBuilder { private final BlockFactory blockFactory; private final long[] values; + private final long preAdjustedBytes; /** * The next value to write into. {@code -1} means the vector has already * been built. @@ -25,7 +26,8 @@ final class LongVectorFixedBuilder implements LongVector.FixedBuilder { private int nextIndex; LongVectorFixedBuilder(int size, BlockFactory blockFactory) { - blockFactory.adjustBreaker(ramBytesUsed(size), false); + preAdjustedBytes = ramBytesUsed(size); + blockFactory.adjustBreaker(preAdjustedBytes, false); this.blockFactory = blockFactory; this.values = new long[size]; } @@ -54,8 +56,16 @@ public LongVector build() { } nextIndex = -1; if (values.length == 1) { - return new ConstantLongVector(values[0], 1, blockFactory); + return blockFactory.newConstantLongBlockWith(values[0], 1, preAdjustedBytes).asVector(); + } + return blockFactory.newLongArrayVector(values, values.length, preAdjustedBytes); + } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-preAdjustedBytes, false); } - return new LongArrayVector(values, values.length, blockFactory); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java index 48aec38b800ce..8b84cfa78ffc4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java @@ -29,12 +29,14 @@ public class MultivalueDedupeBytesRef { * The choice of number has been experimentally derived. */ private static final int ALWAYS_COPY_MISSING = 20; // TODO BytesRef should try adding to the hash *first* and then comparing. + private final Block.Ref ref; private final BytesRefBlock block; private BytesRef[] work = new BytesRef[ArrayUtil.oversize(2, org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF)]; private int w; - public MultivalueDedupeBytesRef(BytesRefBlock block) { - this.block = block; + public MultivalueDedupeBytesRef(Block.Ref ref) { + this.ref = ref; + this.block = (BytesRefBlock) ref.block(); // TODO very large numbers might want a hash based implementation - and for BytesRef that might not be that big fillWork(0, work.length); } @@ -43,9 +45,9 @@ public MultivalueDedupeBytesRef(BytesRefBlock block) { * Remove duplicate values from each position and write the results to a * {@link Block} using an adaptive algorithm based on the size of the input list. 
*/ - public BytesRefBlock dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockAdaptive() { + if (block.mvDeduplicated()) { + return ref; } BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -82,7 +84,7 @@ public BytesRefBlock dedupeToBlockAdaptive() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -91,9 +93,9 @@ public BytesRefBlock dedupeToBlockAdaptive() { * case complexity for larger. Prefer {@link #dedupeToBlockAdaptive} * which picks based on the number of elements at each position. */ - public BytesRefBlock dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyAndSort() { + if (block.mvDeduplicated()) { + return ref; } BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -108,7 +110,7 @@ public BytesRefBlock dedupeToBlockUsingCopyAndSort() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -119,9 +121,9 @@ public BytesRefBlock dedupeToBlockUsingCopyAndSort() { * performance is dominated by the {@code n*log n} sort. Prefer * {@link #dedupeToBlockAdaptive} unless you need the results sorted. 
*/ - public BytesRefBlock dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyMissing() { + if (block.mvDeduplicated()) { + return ref; } BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -136,7 +138,7 @@ public BytesRefBlock dedupeToBlockUsingCopyMissing() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java index d30292f6fa32c..79c64b6a999cb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java @@ -28,21 +28,23 @@ public class MultivalueDedupeDouble { * The choice of number has been experimentally derived. */ private static final int ALWAYS_COPY_MISSING = 110; + private final Block.Ref ref; private final DoubleBlock block; private double[] work = new double[ArrayUtil.oversize(2, Double.BYTES)]; private int w; - public MultivalueDedupeDouble(DoubleBlock block) { - this.block = block; + public MultivalueDedupeDouble(Block.Ref ref) { + this.ref = ref; + this.block = (DoubleBlock) ref.block(); } /** * Remove duplicate values from each position and write the results to a * {@link Block} using an adaptive algorithm based on the size of the input list. 
*/ - public DoubleBlock dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockAdaptive() { + if (block.mvDeduplicated()) { + return ref; } DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -79,7 +81,7 @@ public DoubleBlock dedupeToBlockAdaptive() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -88,9 +90,9 @@ public DoubleBlock dedupeToBlockAdaptive() { * case complexity for larger. Prefer {@link #dedupeToBlockAdaptive} * which picks based on the number of elements at each position. */ - public DoubleBlock dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyAndSort() { + if (block.mvDeduplicated()) { + return ref; } DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -105,7 +107,7 @@ public DoubleBlock dedupeToBlockUsingCopyAndSort() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -116,9 +118,9 @@ public DoubleBlock dedupeToBlockUsingCopyAndSort() { * performance is dominated by the {@code n*log n} sort. Prefer * {@link #dedupeToBlockAdaptive} unless you need the results sorted. 
*/ - public DoubleBlock dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyMissing() { + if (block.mvDeduplicated()) { + return ref; } DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -133,7 +135,7 @@ public DoubleBlock dedupeToBlockUsingCopyMissing() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java index cda9308a7e6d2..291429d7ccf35 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java @@ -27,21 +27,23 @@ public class MultivalueDedupeInt { * The choice of number has been experimentally derived. */ private static final int ALWAYS_COPY_MISSING = 300; + private final Block.Ref ref; private final IntBlock block; private int[] work = new int[ArrayUtil.oversize(2, Integer.BYTES)]; private int w; - public MultivalueDedupeInt(IntBlock block) { - this.block = block; + public MultivalueDedupeInt(Block.Ref ref) { + this.ref = ref; + this.block = (IntBlock) ref.block(); } /** * Remove duplicate values from each position and write the results to a * {@link Block} using an adaptive algorithm based on the size of the input list. 
*/ - public IntBlock dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockAdaptive() { + if (block.mvDeduplicated()) { + return ref; } IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -78,7 +80,7 @@ public IntBlock dedupeToBlockAdaptive() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -87,9 +89,9 @@ public IntBlock dedupeToBlockAdaptive() { * case complexity for larger. Prefer {@link #dedupeToBlockAdaptive} * which picks based on the number of elements at each position. */ - public IntBlock dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyAndSort() { + if (block.mvDeduplicated()) { + return ref; } IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -104,7 +106,7 @@ public IntBlock dedupeToBlockUsingCopyAndSort() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -115,9 +117,9 @@ public IntBlock dedupeToBlockUsingCopyAndSort() { * performance is dominated by the {@code n*log n} sort. Prefer * {@link #dedupeToBlockAdaptive} unless you need the results sorted. 
*/ - public IntBlock dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyMissing() { + if (block.mvDeduplicated()) { + return ref; } IntBlock.Builder builder = IntBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -132,7 +134,7 @@ public IntBlock dedupeToBlockUsingCopyMissing() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java index 0266131fba37c..a3c05be5c0fb6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java @@ -29,21 +29,23 @@ public class MultivalueDedupeLong { */ private static final int ALWAYS_COPY_MISSING = 300; + private final Block.Ref ref; private final LongBlock block; private long[] work = new long[ArrayUtil.oversize(2, Long.BYTES)]; private int w; - public MultivalueDedupeLong(LongBlock block) { - this.block = block; + public MultivalueDedupeLong(Block.Ref ref) { + this.ref = ref; + this.block = (LongBlock) ref.block(); } /** * Remove duplicate values from each position and write the results to a * {@link Block} using an adaptive algorithm based on the size of the input list. 
*/ - public LongBlock dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockAdaptive() { + if (block.mvDeduplicated()) { + return ref; } LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -80,7 +82,7 @@ public LongBlock dedupeToBlockAdaptive() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -89,9 +91,9 @@ public LongBlock dedupeToBlockAdaptive() { * case complexity for larger. Prefer {@link #dedupeToBlockAdaptive} * which picks based on the number of elements at each position. */ - public LongBlock dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyAndSort() { + if (block.mvDeduplicated()) { + return ref; } LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -106,7 +108,7 @@ public LongBlock dedupeToBlockUsingCopyAndSort() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -117,9 +119,9 @@ public LongBlock dedupeToBlockUsingCopyAndSort() { * performance is dominated by the {@code n*log n} sort. Prefer * {@link #dedupeToBlockAdaptive} unless you need the results sorted. 
*/ - public LongBlock dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyMissing() { + if (block.mvDeduplicated()) { + return ref; } LongBlock.Builder builder = LongBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -134,7 +136,7 @@ public LongBlock dedupeToBlockUsingCopyMissing() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java index 0bdc5ac620eb0..40fe7ffdde661 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator.topn; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -19,11 +18,11 @@ static KeyExtractorForBoolean extractorFor(TopNEncoder encoder, boolean ascendin return new KeyExtractorForBoolean.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForBoolean.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForBoolean.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? 
new KeyExtractorForBoolean.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForBoolean.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java index accce46f38e30..2f546a46aaeaf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java @@ -8,7 +8,6 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -20,11 +19,11 @@ static KeyExtractorForBytesRef extractorFor(TopNEncoder encoder, boolean ascendi return new KeyExtractorForBytesRef.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForBytesRef.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForBytesRef.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? 
new KeyExtractorForBytesRef.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForBytesRef.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java index 2f2968da16d83..5e821b9e24db5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator.topn; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -19,11 +18,11 @@ static KeyExtractorForDouble extractorFor(TopNEncoder encoder, boolean ascending return new KeyExtractorForDouble.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForDouble.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForDouble.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? 
new KeyExtractorForDouble.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForDouble.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java index 400c43168277d..d4269a622f098 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator.topn; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -19,11 +18,11 @@ static KeyExtractorForInt extractorFor(TopNEncoder encoder, boolean ascending, b return new KeyExtractorForInt.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForInt.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForInt.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? 
new KeyExtractorForInt.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForInt.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java index 843efdd95471f..6a200efff529d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator.topn; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -19,11 +18,11 @@ static KeyExtractorForLong extractorFor(TopNEncoder encoder, boolean ascending, return new KeyExtractorForLong.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorForLong.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorForLong.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? 
new KeyExtractorForLong.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorForLong.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java index 50cef0417dd45..3d568adc2b5ea 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; class ResultBuilderForBoolean implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForBoolean implements ResultBuilder { */ private boolean key; - ResultBuilderForBoolean(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForBoolean(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = BooleanBlock.newBlockBuilder(initialSize); + this.builder = BooleanBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public BooleanBlock build() { public String toString() { return "ResultBuilderForBoolean[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java index 55f324c931b67..e37f82f3363a9 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; class ResultBuilderForBytesRef implements ResultBuilder { @@ -24,10 +25,10 @@ class ResultBuilderForBytesRef implements ResultBuilder { */ private BytesRef key; - ResultBuilderForBytesRef(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForBytesRef(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { this.encoder = encoder; this.inKey = inKey; - this.builder = BytesRefBlock.newBlockBuilder(initialSize); + this.builder = BytesRefBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -67,4 +68,9 @@ public BytesRefBlock build() { public String toString() { return "ResultBuilderForBytesRef[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java index ed4a9b45d90dc..77c976c6e0085 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; class ResultBuilderForDouble implements 
ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForDouble implements ResultBuilder { */ private double key; - ResultBuilderForDouble(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForDouble(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = DoubleBlock.newBlockBuilder(initialSize); + this.builder = DoubleBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public DoubleBlock build() { public String toString() { return "ResultBuilderForDouble[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java index 2bcfc81107445..389ed3bc2e3c3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; class ResultBuilderForInt implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForInt implements ResultBuilder { */ private int key; - ResultBuilderForInt(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForInt(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = IntBlock.newBlockBuilder(initialSize); + this.builder = IntBlock.newBlockBuilder(initialSize, 
blockFactory); } @Override @@ -63,4 +64,9 @@ public IntBlock build() { public String toString() { return "ResultBuilderForInt[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java index 3ada85bf9d5c9..63ee9d35c59e5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; class ResultBuilderForLong implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForLong implements ResultBuilder { */ private long key; - ResultBuilderForLong(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForLong(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = LongBlock.newBlockBuilder(initialSize); + this.builder = LongBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public LongBlock build() { public String toString() { return "ResultBuilderForLong[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/module-info.java b/x-pack/plugin/esql/compute/src/main/java/module-info.java index 3104fb05280eb..195c5fff6142b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/module-info.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/module-info.java @@ -6,7 +6,6 @@ */ module org.elasticsearch.compute { - uses org.elasticsearch.compute.data.BlockFactoryParameters; requires org.apache.lucene.core; requires org.elasticsearch.base; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java index 25ff4a2a3ab6a..c9374b78ba5ac 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java @@ -49,6 +49,7 @@ public static List intermediateStateDesc() { private final LongState state; private final List channels; + private final boolean countAll; public static CountAggregatorFunction create(List inputChannels) { return new CountAggregatorFunction(inputChannels, new LongState()); @@ -57,6 +58,8 @@ public static CountAggregatorFunction create(List inputChannels) { private CountAggregatorFunction(List channels, LongState state) { this.channels = channels; this.state = state; + // no channels specified means count-all/count(*) + this.countAll = channels.isEmpty(); } @Override @@ -64,17 +67,23 @@ public int intermediateBlockCount() { return intermediateStateDesc().size(); } + private int blockIndex() { + return countAll ? 0 : channels.get(0); + } + @Override public void addRawInput(Page page) { - Block block = page.getBlock(channels.get(0)); + Block block = page.getBlock(blockIndex()); LongState state = this.state; - state.longValue(state.longValue() + block.getTotalValueCount()); + int count = countAll ? 
block.getPositionCount() : block.getTotalValueCount(); + state.longValue(state.longValue() + count); } @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + var blockIndex = blockIndex(); + assert page.getBlockCount() >= blockIndex + intermediateStateDesc().size(); LongVector count = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); assert count.getPositionCount() == 1; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java index 078e0cff99daa..cc33a8de8bf6f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java @@ -30,6 +30,7 @@ public class CountGroupingAggregatorFunction implements GroupingAggregatorFuncti private final LongArrayState state; private final List channels; + private final boolean countAll; public static CountGroupingAggregatorFunction create(BigArrays bigArrays, List inputChannels) { return new CountGroupingAggregatorFunction(inputChannels, new LongArrayState(bigArrays, 0)); @@ -42,6 +43,11 @@ public static List intermediateStateDesc() { private CountGroupingAggregatorFunction(List channels, LongArrayState state) { this.channels = channels; this.state = state; + this.countAll = channels.isEmpty(); + } + + private int blockIndex() { + return countAll ? 
0 : channels.get(0); } @Override @@ -51,33 +57,35 @@ public int intermediateBlockCount() { @Override public AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block valuesBlock = page.getBlock(channels.get(0)); - if (valuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new AddInput() { // TODO return null meaning "don't collect me" and skip those - @Override - public void add(int positionOffset, IntBlock groupIds) {} - - @Override - public void add(int positionOffset, IntVector groupIds) {} - }; - } - Vector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { + Block valuesBlock = page.getBlock(blockIndex()); + if (countAll == false) { + if (valuesBlock.areAllValuesNull()) { state.enableGroupIdTracking(seenGroupIds); - } - return new AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } + return new AddInput() { // TODO return null meaning "don't collect me" and skip those + @Override + public void add(int positionOffset, IntBlock groupIds) {} - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); + @Override + public void add(int positionOffset, IntVector groupIds) {} + }; + } + Vector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); } - }; + return new AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } } return new AddInput() { @Override @@ -121,6 +129,9 @@ private void addRawInput(int positionOffset, IntBlock groups, Block values) { } } + /** + * This method is called for 
count all. + */ private void addRawInput(IntVector groups) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); @@ -128,6 +139,9 @@ private void addRawInput(IntVector groups) { } } + /** + * This method is called for count all. + */ private void addRawInput(IntBlock groups) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { // TODO remove the check one we don't emit null anymore @@ -146,7 +160,7 @@ private void addRawInput(IntBlock groups) { @Override public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + assert page.getBlockCount() >= blockIndex() + intermediateStateDesc().size(); state.enableGroupIdTracking(new SeenGroupIds.Empty()); LongVector count = page.getBlock(channels.get(0)).asVector(); BooleanVector seen = page.getBlock(channels.get(1)).asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index 1a7bad4366be9..e6761c2de767f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.IntArrayVector; @@ -54,7 +55,7 @@ 
private IntVector add(BooleanVector vector) { } private IntBlock add(BooleanBlock block) { - return new MultivalueDedupeBoolean(block).hash(everSeen); + return new MultivalueDedupeBoolean(Block.Ref.floating(block)).hash(everSeen); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index ee77e5e3c19b8..6b47711657bee 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -71,7 +72,7 @@ private IntVector add(BytesRefVector vector) { } private IntBlock add(BytesRefBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeBytesRef(block).hash(bytesRefHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeBytesRef(Block.Ref.floating(block)).hash(bytesRefHash); seenNull |= result.sawNull(); return result.ords(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index 3a52beb9c2d87..2a9dbd74db311 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -67,7 +67,7 @@ private IntVector add(DoubleVector vector) { } private IntBlock add(DoubleBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeDouble(block).hash(longHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeDouble(Block.Ref.floating(block)).hash(longHash); seenNull |= result.sawNull(); return result.ords(); } @@ -82,7 +82,8 @@ public DoubleBlock[] getKeys() { } BitSet nulls = new BitSet(1); nulls.set(0); - return new DoubleBlock[] { new DoubleArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.ASCENDING) }; + return new DoubleBlock[] { + new DoubleArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; } final int size = Math.toIntExact(longHash.size()); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index 4fcd9735f6158..ef82b14f2076b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -63,7 +63,7 @@ private IntVector add(IntVector vector) { } private IntBlock add(IntBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeInt(block).hash(longHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeInt(Block.Ref.floating(block)).hash(longHash); seenNull |= result.sawNull(); return result.ords(); } @@ -78,7 +78,7 @@ public IntBlock[] getKeys() { } BitSet nulls = new BitSet(1); nulls.set(0); - return new IntBlock[] { new IntArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.ASCENDING) }; + return new IntBlock[] { new IntArrayBlock(keys, keys.length, null, nulls, 
Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; } final int size = Math.toIntExact(longHash.size()); final int[] keys = new int[size]; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index 5e5b46ae6eda1..5cdc1824b7067 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -67,7 +67,7 @@ private IntVector add(LongVector vector) { } private IntBlock add(LongBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeLong(block).hash(longHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeLong(Block.Ref.floating(block)).hash(longHash); seenNull |= result.sawNull(); return result.ords(); } @@ -82,7 +82,8 @@ public LongBlock[] getKeys() { } BitSet nulls = new BitSet(1); nulls.set(0); - return new LongBlock[] { new LongArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.ASCENDING) }; + return new LongBlock[] { + new LongArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; } final int size = Math.toIntExact(longHash.size()); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java index 31f65e9b70053..9a5fe6e66cb37 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java @@ -22,8 +22,6 @@ import org.elasticsearch.compute.operator.BatchEncoder; import 
org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.MultivalueDedupe; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; import java.util.Arrays; import java.util.List; @@ -51,19 +49,19 @@ * } */ final class PackedValuesBlockHash extends BlockHash { - private static final Logger logger = LogManager.getLogger(PackedValuesBlockHash.class); static final int DEFAULT_BATCH_SIZE = Math.toIntExact(ByteSizeValue.ofKb(10).getBytes()); - private final List groups; private final int emitBatchSize; private final BytesRefHash bytesRefHash; private final int nullTrackingBytes; + private final BytesRefBuilder bytes = new BytesRefBuilder(); + private final Group[] groups; - PackedValuesBlockHash(List groups, BigArrays bigArrays, int emitBatchSize) { - this.groups = groups; + PackedValuesBlockHash(List specs, BigArrays bigArrays, int emitBatchSize) { + this.groups = specs.stream().map(Group::new).toArray(Group[]::new); this.emitBatchSize = emitBatchSize; this.bytesRefHash = new BytesRefHash(1, bigArrays); - this.nullTrackingBytes = groups.size() / 8 + 1; + this.nullTrackingBytes = (groups.length + 7) / 8; } @Override @@ -75,23 +73,28 @@ void add(Page page, GroupingAggregatorFunction.AddInput addInput, int batchSize) new AddWork(page, addInput, batchSize).add(); } + private static class Group { + final HashAggregationOperator.GroupSpec spec; + BatchEncoder encoder; + int positionOffset; + int valueOffset; + int loopedIndex; + int valueCount; + int bytesStart; + + Group(HashAggregationOperator.GroupSpec spec) { + this.spec = spec; + } + } + class AddWork extends LongLongBlockHash.AbstractAddBlock { - final BatchEncoder[] encoders = new BatchEncoder[groups.size()]; - final int[] positionOffsets = new int[groups.size()]; - final int[] valueOffsets = new int[groups.size()]; - final BytesRef[] scratches = new BytesRef[groups.size()]; - final BytesRefBuilder bytes = new BytesRefBuilder(); final int 
positionCount; - int position; - int count; - int bufferedGroup; AddWork(Page page, GroupingAggregatorFunction.AddInput addInput, int batchSize) { super(emitBatchSize, addInput); - for (int g = 0; g < groups.size(); g++) { - encoders[g] = MultivalueDedupe.batchEncoder(page.getBlock(groups.get(g).channel()), batchSize); - scratches[g] = new BytesRef(); + for (Group group : groups) { + group.encoder = MultivalueDedupe.batchEncoder(new Block.Ref(page.getBlock(group.spec.channel()), page), batchSize, true); } bytes.grow(nullTrackingBytes); this.positionCount = page.getPositionCount(); @@ -104,91 +107,80 @@ class AddWork extends LongLongBlockHash.AbstractAddBlock { */ void add() { for (position = 0; position < positionCount; position++) { - if (logger.isTraceEnabled()) { - logger.trace("position {}", position); - } // Make sure all encoders have encoded the current position and the offsets are queued to it's start - for (int g = 0; g < encoders.length; g++) { - positionOffsets[g]++; - while (positionOffsets[g] >= encoders[g].positionCount()) { - encoders[g].encodeNextBatch(); - positionOffsets[g] = 0; - valueOffsets[g] = 0; + boolean singleEntry = true; + for (Group g : groups) { + var encoder = g.encoder; + g.positionOffset++; + while (g.positionOffset >= encoder.positionCount()) { + encoder.encodeNextBatch(); + g.positionOffset = 0; + g.valueOffset = 0; } + g.valueCount = encoder.valueCount(g.positionOffset); + singleEntry &= (g.valueCount == 1); } - - count = 0; Arrays.fill(bytes.bytes(), 0, nullTrackingBytes, (byte) 0); bytes.setLength(nullTrackingBytes); - addPosition(0); - switch (count) { - case 0 -> throw new IllegalStateException("didn't find any values"); - case 1 -> { - ords.appendInt(bufferedGroup); - addedValue(position); - } - default -> ords.endPositionEntry(); - } - for (int g = 0; g < encoders.length; g++) { - valueOffsets[g] += encoders[g].valueCount(positionOffsets[g]); + if (singleEntry) { + addSingleEntry(); + } else { + addMultipleEntries(); } } 
emitOrds(); } - private void addPosition(int g) { - if (g == groups.size()) { - addBytes(); - return; - } - int start = bytes.length(); - int count = encoders[g].valueCount(positionOffsets[g]); - assert count > 0; - int valueOffset = valueOffsets[g]; - BytesRef v = encoders[g].read(valueOffset++, scratches[g]); - if (logger.isTraceEnabled()) { - logger.trace("\t".repeat(g + 1) + v); - } - if (v.length == 0) { - assert count == 1 : "null value in non-singleton list"; - int nullByte = g / 8; - int nullShift = g % 8; - bytes.bytes()[nullByte] |= (byte) (1 << nullShift); - } - bytes.setLength(start); - bytes.append(v); - addPosition(g + 1); // TODO stack overflow protection - for (int i = 1; i < count; i++) { - v = encoders[g].read(valueOffset++, scratches[g]); - if (logger.isTraceEnabled()) { - logger.trace("\t".repeat(g + 1) + v); + private void addSingleEntry() { + for (int g = 0; g < groups.length; g++) { + Group group = groups[g]; + if (group.encoder.read(group.valueOffset++, bytes) == 0) { + int nullByte = g / 8; + int nullShift = g % 8; + bytes.bytes()[nullByte] |= (byte) (1 << nullShift); } - assert v.length > 0 : "null value after the first position"; - bytes.setLength(start); - bytes.append(v); - addPosition(g + 1); } + int ord = Math.toIntExact(hashOrdToGroup(bytesRefHash.add(bytes.get()))); + ords.appendInt(ord); + addedValue(position); } - private void addBytes() { - int group = Math.toIntExact(hashOrdToGroup(bytesRefHash.add(bytes.get()))); - switch (count) { - case 0 -> bufferedGroup = group; - case 1 -> { - ords.beginPositionEntry(); - ords.appendInt(bufferedGroup); - addedValueInMultivaluePosition(position); - ords.appendInt(group); - addedValueInMultivaluePosition(position); + private void addMultipleEntries() { + ords.beginPositionEntry(); + int g = 0; + outer: for (;;) { + for (; g < groups.length; g++) { + Group group = groups[g]; + group.bytesStart = bytes.length(); + if (group.encoder.read(group.valueOffset + group.loopedIndex, bytes) == 0) { + 
assert group.valueCount == 1 : "null value in non-singleton list"; + int nullByte = g / 8; + int nullShift = g % 8; + bytes.bytes()[nullByte] |= (byte) (1 << nullShift); + } + ++group.loopedIndex; } - default -> { - ords.appendInt(group); - addedValueInMultivaluePosition(position); + // emit ords + int ord = Math.toIntExact(hashOrdToGroup(bytesRefHash.add(bytes.get()))); + ords.appendInt(ord); + addedValueInMultivaluePosition(position); + + // rewind + Group group = groups[--g]; + bytes.setLength(group.bytesStart); + while (group.loopedIndex == group.valueCount) { + group.loopedIndex = 0; + if (g == 0) { + break outer; + } else { + group = groups[--g]; + bytes.setLength(group.bytesStart); + } } } - count++; - if (logger.isTraceEnabled()) { - logger.trace("{} = {}", bytes.get(), group); + ords.endPositionEntry(); + for (Group group : groups) { + group.valueOffset += group.valueCount; } } } @@ -196,16 +188,16 @@ private void addBytes() { @Override public Block[] getKeys() { int size = Math.toIntExact(bytesRefHash.size()); - BatchEncoder.Decoder[] decoders = new BatchEncoder.Decoder[groups.size()]; - Block.Builder[] builders = new Block.Builder[groups.size()]; + BatchEncoder.Decoder[] decoders = new BatchEncoder.Decoder[groups.length]; + Block.Builder[] builders = new Block.Builder[groups.length]; for (int g = 0; g < builders.length; g++) { - ElementType elementType = groups.get(g).elementType(); + ElementType elementType = groups[g].spec.elementType(); decoders[g] = BatchEncoder.decoder(elementType); builders[g] = elementType.newBlockBuilder(size); } - BytesRef values[] = new BytesRef[(int) Math.min(100, bytesRefHash.size())]; - BytesRef nulls[] = new BytesRef[values.length]; + BytesRef[] values = new BytesRef[(int) Math.min(100, bytesRefHash.size())]; + BytesRef[] nulls = new BytesRef[values.length]; for (int offset = 0; offset < values.length; offset++) { values[offset] = new BytesRef(); nulls[offset] = new BytesRef(); @@ -231,7 +223,7 @@ public Block[] getKeys() { 
readKeys(decoders, builders, nulls, values, offset); } - Block[] keyBlocks = new Block[groups.size()]; + Block[] keyBlocks = new Block[groups.length]; for (int g = 0; g < keyBlocks.length; g++) { keyBlocks[g] = builders[g].build(); } @@ -271,13 +263,12 @@ public String toString() { StringBuilder b = new StringBuilder(); b.append("PackedValuesBlockHash{groups=["); boolean first = true; - for (HashAggregationOperator.GroupSpec spec : groups) { - if (first) { - first = false; - } else { + for (int i = 0; i < groups.length; i++) { + if (i > 0) { b.append(", "); } - b.append(spec.channel()).append(':').append(spec.elementType()); + Group group = groups[i]; + b.append(group.spec.channel()).append(':').append(group.spec.elementType()); } b.append("], entries=").append(bytesRefHash.size()); b.append(", size=").append(ByteSizeValue.ofBytes(bytesRefHash.ramBytesUsed())); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java index a6ad5d1299543..3d06eba398513 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java @@ -33,6 +33,8 @@ abstract class AbstractBlockBuilder implements Block.Builder { /** The number of bytes currently estimated with the breaker. */ protected long estimatedBytes; + private boolean closed = false; + protected AbstractBlockBuilder(BlockFactory blockFactory) { this.blockFactory = blockFactory; } @@ -101,7 +103,14 @@ protected final void updatePosition() { } } + /** + * Called during implementations of {@link Block.Builder#build} as a first step + * to check if the block is still open and to finish the last position. 
+ */ protected final void finish() { + if (closed) { + throw new IllegalStateException("already closed"); + } if (positionEntryIsOpen) { endPositionEntry(); } @@ -110,6 +119,16 @@ protected final void finish() { } } + /** + * Called during implementations of {@link Block.Builder#build} as a last step + * to mark the Builder as closed and make sure that further closes don't double + * free memory. + */ + protected final void built() { + closed = true; + estimatedBytes = 0; + } + protected abstract void growValuesArray(int newSize); /** The number of bytes used to represent each value element. */ @@ -125,6 +144,20 @@ protected final void ensureCapacity() { growValuesArray(newSize); } + @Override + public final void close() { + if (closed == false) { + closed = true; + adjustBreaker(-estimatedBytes); + extraClose(); + } + } + + /** + * Called when first {@link #close() closed}. + */ + protected void extraClose() {} + static int calculateNewArraySize(int currentSize) { // trivially, grows array by 50% return currentSize + (currentSize >> 1); @@ -133,6 +166,7 @@ static int calculateNewArraySize(int currentSize) { protected void adjustBreaker(long deltaBytes) { blockFactory.adjustBreaker(deltaBytes, false); estimatedBytes += deltaBytes; + assert estimatedBytes >= 0; } private void setFirstValue(int position, int value) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java index d83d26cf33831..4a019db5e03c0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java @@ -52,7 +52,7 @@ public boolean mayHaveMultivaluedFields() { @Override public final MvOrdering mvOrdering() { - return MvOrdering.UNORDERED; + return MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING; } 
@Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java index 49ce276074735..274e88cd8d8b6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java @@ -7,9 +7,14 @@ package org.elasticsearch.compute.data; -abstract class AbstractVectorBuilder { +abstract class AbstractVectorBuilder implements Vector.Builder { protected int valueCount; + /** + * Has this builder been closed already? + */ + private boolean closed = false; + protected final BlockFactory blockFactory; /** The number of bytes currently estimated with the breaker. */ @@ -46,4 +51,38 @@ protected void adjustBreaker(long deltaBytes) { blockFactory.adjustBreaker(deltaBytes, false); estimatedBytes += deltaBytes; } + + /** + * Called during implementations of {@link Block.Builder#build} as a first step + * to check if the block is still open and to finish the last position. + */ + protected final void finish() { + if (closed) { + throw new IllegalStateException("already closed"); + } + } + + /** + * Called during implementations of {@link Block.Builder#build} as a last step + * to mark the Builder as closed and make sure that further closes don't double + * free memory. + */ + protected final void built() { + closed = true; + estimatedBytes = 0; + } + + @Override + public final void close() { + if (closed == false) { + closed = true; + adjustBreaker(-estimatedBytes); + extraClose(); + } + } + + /** + * Called when first {@link #close() closed}. 
+ */ + protected void extraClose() {} } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 5b10a3a510de0..c5d6780e84685 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.Accountable; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import java.util.List; @@ -102,12 +103,20 @@ public interface Block extends Accountable, NamedWriteable, Releasable { /** * How are multivalued fields ordered? - *

Note that there isn't a {@code DESCENDING} because we don't have - * anything that makes descending fields.

+ * Some operators can enable its optimization when mv_values are sorted ascending or de-duplicated. */ enum MvOrdering { - ASCENDING, - UNORDERED; + UNORDERED(false, false), + DEDUPLICATED_UNORDERD(true, false), + DEDUPLICATED_AND_SORTED_ASCENDING(true, true); + + private final boolean deduplicated; + private final boolean sortedAscending; + + MvOrdering(boolean deduplicated, boolean sortedAscending) { + this.deduplicated = deduplicated; + this.sortedAscending = sortedAscending; + } } /** @@ -115,6 +124,20 @@ enum MvOrdering { */ MvOrdering mvOrdering(); + /** + * Are multivalued fields de-duplicated in each position + */ + default boolean mvDeduplicated() { + return mayHaveMultivaluedFields() == false || mvOrdering().deduplicated; + } + + /** + * Are multivalued fields sorted ascending in each position + */ + default boolean mvSortedAscending() { + return mayHaveMultivaluedFields() == false || mvOrdering().sortedAscending; + } + /** * Expand multivalued fields into one row per value. Returns the * block if there aren't any multivalued fields to expand. @@ -133,7 +156,11 @@ static Block constantNullBlock(int positions, BlockFactory blockFactory) { return blockFactory.newConstantNullBlock(positions); } - interface Builder { + /** + * Builds {@link Block}s. Typically, you use one of it's direct supinterfaces like {@link IntBlock.Builder}. + * This is {@link Releasable} and should be released after building the block or if building the block fails. + */ + interface Builder extends Releasable { /** * Appends a null value to the block. @@ -168,7 +195,7 @@ interface Builder { /** * How are multivalued fields ordered? This defaults to {@link Block.MvOrdering#UNORDERED} - * but when you set it to {@link Block.MvOrdering#ASCENDING} some operators can optimize + * but when you set it to {@link Block.MvOrdering#DEDUPLICATED_AND_SORTED_ASCENDING} some operators can optimize * themselves. This is a promise that is never checked. 
If you set this * to anything other than {@link Block.MvOrdering#UNORDERED} be sure the values are in * that order or other operators will make mistakes. The actual ordering isn't checked @@ -182,6 +209,48 @@ interface Builder { Block build(); } + /** + * A reference to a {@link Block}. This is {@link Releasable} and + * {@link Ref#close closing} it will {@link Block#close release} + * the underlying {@link Block} if it wasn't borrowed from a {@link Page}. + * + * The usual way to use this is: + *
{@code
+     *   try (Block.Ref ref = eval.eval(page)) {
+     *     return ref.block().doStuff;
+     *   }
+     * }
+ * + * The {@code try} block will return the memory used by the block to the + * breaker if it was "free floating", but if it was attached to a {@link Page} + * then it'll do nothing. + * + * @param block the block referenced + * @param containedIn the page containing it or null, if it is "free floating". + */ + record Ref(Block block, @Nullable Page containedIn) implements Releasable { + /** + * Create a "free floating" {@link Ref}. + */ + public static Ref floating(Block block) { + return new Ref(block, null); + } + + /** + * Is this block "free floating" or attached to a page? + */ + public boolean floating() { + return containedIn == null; + } + + @Override + public void close() { + if (floating()) { + block.close(); + } + } + } + static List getNamedWriteables() { return List.of( IntBlock.ENTRY, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index 2afea228a4a78..0bb49d205669e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -16,8 +16,6 @@ import org.elasticsearch.compute.data.Block.MvOrdering; import java.util.BitSet; -import java.util.List; -import java.util.ServiceLoader; public class BlockFactory { @@ -26,22 +24,6 @@ public class BlockFactory { BigArrays.NON_RECYCLING_INSTANCE ); - private static final BlockFactory GLOBAL = loadGlobalFactory(); - // new BlockFactory(new NoopCircuitBreaker("esql_noop_breaker"), BigArrays.NON_RECYCLING_INSTANCE); - - private static BlockFactory loadGlobalFactory() { - ServiceLoader loader = ServiceLoader.load( - BlockFactoryParameters.class, - BlockFactory.class.getClassLoader() - ); - List> impls = loader.stream().toList(); - if (impls.size() != 1) { - throw new AssertionError("expected exactly one impl, but got:" + impls); - } 
- BlockFactoryParameters params = impls.get(0).get(); - return new BlockFactory(params.breaker(), params.bigArrays()); - } - private final CircuitBreaker breaker; private final BigArrays bigArrays; @@ -51,13 +33,6 @@ public BlockFactory(CircuitBreaker breaker, BigArrays bigArrays) { this.bigArrays = bigArrays; } - /** - * Returns the global ESQL block factory. - */ - public static BlockFactory getGlobalInstance() { - return GLOBAL; - } - /** * Returns the Non-Breaking block factory. */ @@ -92,11 +67,11 @@ void adjustBreaker(final long delta, final boolean isDataAlreadyCreated) { try { breaker.addEstimateBytesAndMaybeBreak(delta, ""); } catch (CircuitBreakingException e) { - if (isDataAlreadyCreated) { - // since we've already created the data, we need to - // add it so closing the stream re-adjusts properly - breaker.addWithoutBreaking(delta); - } + // if (isDataAlreadyCreated) { // TODO: remove isDataAlreadyCreated + // since we've already created the data, we need to + // add it so closing the stream re-adjusts properly + // breaker.addWithoutBreaking(delta); + // } // re-throw the original exception throw e; } @@ -138,15 +113,13 @@ BooleanVector.FixedBuilder newBooleanVectorFixedBuilder(int size) { return new BooleanVectorFixedBuilder(size, this); } - public BooleanBlock newBooleanArrayBlock( - boolean[] values, - int positionCount, - int[] firstValueIndexes, - BitSet nulls, - MvOrdering mvOrdering - ) { - var b = new BooleanArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); - adjustBreaker(b.ramBytesUsed(), true); + public final BooleanBlock newBooleanArrayBlock(boolean[] values, int pc, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { + return newBooleanArrayBlock(values, pc, firstValueIndexes, nulls, mvOrdering, 0L); + } + + public BooleanBlock newBooleanArrayBlock(boolean[] values, int pc, int[] fvi, BitSet nulls, MvOrdering mvOrder, long preAdjustedBytes) { + var b = new BooleanArrayBlock(values, pc, fvi, nulls, 
mvOrder, this); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); return b; } @@ -154,7 +127,7 @@ public BooleanVector.Builder newBooleanVectorBuilder(int estimatedSize) { return new BooleanVectorBuilder(estimatedSize, this); } - public BooleanVector newBooleanArrayVector(boolean[] values, int positionCount) { + public final BooleanVector newBooleanArrayVector(boolean[] values, int positionCount) { return newBooleanArrayVector(values, positionCount, 0L); } @@ -164,9 +137,13 @@ public BooleanVector newBooleanArrayVector(boolean[] values, int positionCount, return b; } - public BooleanBlock newConstantBooleanBlockWith(boolean value, int positions) { + public final BooleanBlock newConstantBooleanBlockWith(boolean value, int positions) { + return newConstantBooleanBlockWith(value, positions, 0L); + } + + public BooleanBlock newConstantBooleanBlockWith(boolean value, int positions, long preAdjustedBytes) { var b = new ConstantBooleanVector(value, positions, this).asBlock(); - adjustBreaker(b.ramBytesUsed(), true); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); return b; } @@ -174,9 +151,13 @@ public IntBlock.Builder newIntBlockBuilder(int estimatedSize) { return new IntBlockBuilder(estimatedSize, this); } - public IntBlock newIntArrayBlock(int[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - var b = new IntArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); - adjustBreaker(b.ramBytesUsed(), true); + public final IntBlock newIntArrayBlock(int[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { + return newIntArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, 0L); + } + + public IntBlock newIntArrayBlock(int[] values, int pc, int[] fvi, BitSet nulls, MvOrdering mvOrdering, long preAdjustedBytes) { + var b = new IntArrayBlock(values, pc, fvi, nulls, mvOrdering, this); + adjustBreaker(b.ramBytesUsed() - 
preAdjustedBytes, true); return b; } @@ -192,7 +173,7 @@ IntVector.FixedBuilder newIntVectorFixedBuilder(int size) { * Creates a new Vector with the given values and positionCount. Equivalent to: * newIntArrayVector(values, positionCount, 0L); // with zero pre-adjusted bytes */ - public IntVector newIntArrayVector(int[] values, int positionCount) { + public final IntVector newIntArrayVector(int[] values, int positionCount) { return newIntArrayVector(values, positionCount, 0L); } @@ -213,9 +194,13 @@ public IntVector newIntArrayVector(int[] values, int positionCount, long preAdju return b; } - public IntBlock newConstantIntBlockWith(int value, int positions) { + public final IntBlock newConstantIntBlockWith(int value, int positions) { + return newConstantIntBlockWith(value, positions, 0L); + } + + public IntBlock newConstantIntBlockWith(int value, int positions, long preAdjustedBytes) { var b = new ConstantIntVector(value, positions, this).asBlock(); - adjustBreaker(b.ramBytesUsed(), true); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); return b; } @@ -223,9 +208,13 @@ public LongBlock.Builder newLongBlockBuilder(int estimatedSize) { return new LongBlockBuilder(estimatedSize, this); } - public LongBlock newLongArrayBlock(long[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - var b = new LongArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); - adjustBreaker(b.ramBytesUsed(), true); + public final LongBlock newLongArrayBlock(long[] values, int pc, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { + return newLongArrayBlock(values, pc, firstValueIndexes, nulls, mvOrdering, 0L); + } + + public LongBlock newLongArrayBlock(long[] values, int pc, int[] fvi, BitSet nulls, MvOrdering mvOrdering, long preAdjustedBytes) { + var b = new LongArrayBlock(values, pc, fvi, nulls, mvOrdering, this); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); return b; } @@ -237,7 
+226,7 @@ LongVector.FixedBuilder newLongVectorFixedBuilder(int size) { return new LongVectorFixedBuilder(size, this); } - public LongVector newLongArrayVector(long[] values, int positionCount) { + public final LongVector newLongArrayVector(long[] values, int positionCount) { return newLongArrayVector(values, positionCount, 0L); } @@ -247,9 +236,13 @@ public LongVector newLongArrayVector(long[] values, int positionCount, long preA return b; } - public LongBlock newConstantLongBlockWith(long value, int positions) { + public final LongBlock newConstantLongBlockWith(long value, int positions) { + return newConstantLongBlockWith(value, positions, 0L); + } + + public LongBlock newConstantLongBlockWith(long value, int positions, long preAdjustedBytes) { var b = new ConstantLongVector(value, positions, this).asBlock(); - adjustBreaker(b.ramBytesUsed(), true); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); return b; } @@ -257,15 +250,14 @@ public DoubleBlock.Builder newDoubleBlockBuilder(int estimatedSize) { return new DoubleBlockBuilder(estimatedSize, this); } - public DoubleBlock newDoubleArrayBlock( - double[] values, - int positionCount, - int[] firstValueIndexes, - BitSet nulls, - MvOrdering mvOrdering - ) { - var b = new DoubleArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); - adjustBreaker(b.ramBytesUsed(), true); + public final DoubleBlock newDoubleArrayBlock(double[] values, int pc, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { + return newDoubleArrayBlock(values, pc, firstValueIndexes, nulls, mvOrdering, 0L); + + } + + public DoubleBlock newDoubleArrayBlock(double[] values, int pc, int[] fvi, BitSet nulls, MvOrdering mvOrdering, long preAdjustedBytes) { + var b = new DoubleArrayBlock(values, pc, fvi, nulls, mvOrdering, this); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); return b; } @@ -277,7 +269,7 @@ DoubleVector.FixedBuilder newDoubleVectorFixedBuilder(int size) { return new 
DoubleVectorFixedBuilder(size, this); } - public DoubleVector newDoubleArrayVector(double[] values, int positionCount) { + public final DoubleVector newDoubleArrayVector(double[] values, int positionCount) { return newDoubleArrayVector(values, positionCount, 0L); } @@ -287,9 +279,13 @@ public DoubleVector newDoubleArrayVector(double[] values, int positionCount, lon return b; } - public DoubleBlock newConstantDoubleBlockWith(double value, int positions) { + public final DoubleBlock newConstantDoubleBlockWith(double value, int positions) { + return newConstantDoubleBlockWith(value, positions, 0L); + } + + public DoubleBlock newConstantDoubleBlockWith(double value, int positions, long preAdjustedBytes) { var b = new ConstantDoubleVector(value, positions, this).asBlock(); - adjustBreaker(b.ramBytesUsed(), true); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); return b; } @@ -297,15 +293,9 @@ public BytesRefBlock.Builder newBytesRefBlockBuilder(int estimatedSize) { return new BytesRefBlockBuilder(estimatedSize, bigArrays, this); } - public BytesRefBlock newBytesRefArrayBlock( - BytesRefArray values, - int positionCount, - int[] firstValueIndexes, - BitSet nulls, - MvOrdering mvOrdering - ) { - var b = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); - adjustBreaker(b.ramBytesUsed() - values.ramBytesUsed(), true); + public BytesRefBlock newBytesRefArrayBlock(BytesRefArray values, int pc, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { + var b = new BytesRefArrayBlock(values, pc, firstValueIndexes, nulls, mvOrdering, this); + adjustBreaker(b.ramBytesUsed() - values.bigArraysRamBytesUsed(), true); return b; } @@ -315,7 +305,7 @@ public BytesRefVector.Builder newBytesRefVectorBuilder(int estimatedSize) { public BytesRefVector newBytesRefArrayVector(BytesRefArray values, int positionCount) { var b = new BytesRefArrayVector(values, positionCount, this); - adjustBreaker(b.ramBytesUsed() - 
values.ramBytesUsed(), true); + adjustBreaker(b.ramBytesUsed() - values.bigArraysRamBytesUsed(), true); return b; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryParameters.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryParameters.java deleted file mode 100644 index a9dc11635f8c0..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryParameters.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.data; - -import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.util.BigArrays; - -/** - * Allows to inject instances of a breaker and bigArrays into the Global block factory. - * The Global factory is somewhat temporary, therefore this interface and its ServiceLoader - * machinery can be removed once the Global factory is removed. 
- */ -public interface BlockFactoryParameters { - - CircuitBreaker breaker(); - - BigArrays bigArrays(); -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java index 2ebbb771b5df1..a41ea0383368d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java @@ -8,10 +8,13 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Randomness; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; +import java.util.Random; import java.util.function.Consumer; import static org.elasticsearch.common.lucene.BytesRefs.toBytesRef; @@ -68,8 +71,13 @@ public static Block[] fromListRow(List row, int blockSize) { if (object instanceof List listVal) { BuilderWrapper wrapper = wrapperFor(fromJava(listVal.get(0).getClass()), blockSize); wrapper.accept(listVal); - if (isAscending(listVal)) { - wrapper.builder.mvOrdering(Block.MvOrdering.ASCENDING); + Random random = Randomness.get(); + if (isDeduplicated(listVal) && random.nextBoolean()) { + if (isAscending(listVal) && random.nextBoolean()) { + wrapper.builder.mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); + } else { + wrapper.builder.mvOrdering(Block.MvOrdering.DEDUPLICATED_UNORDERD); + } } blocks[i] = wrapper.builder.build(); } else { @@ -100,6 +108,14 @@ private static boolean isAscending(List values) { return true; } + /** + * Detect blocks with deduplicated fields. This is *mostly* useful for + * exercising the specialized deduplicated implementations.
+ */ + private static boolean isDeduplicated(List values) { + return new HashSet<>(values).size() == values.size(); + } + public static Block[] fromList(List> list) { var size = list.size(); if (size == 0) { @@ -210,7 +226,7 @@ public static Object toJavaObject(Block block, int position) { private static Object valueAtOffset(Block block, int offset) { return switch (block.elementType()) { case BOOLEAN -> ((BooleanBlock) block).getBoolean(offset); - case BYTES_REF -> ((BytesRefBlock) block).getBytesRef(offset, new BytesRef()); + case BYTES_REF -> BytesRef.deepCopyOf(((BytesRefBlock) block).getBytesRef(offset, new BytesRef())); case DOUBLE -> ((DoubleBlock) block).getDouble(offset); case INT -> ((IntBlock) block).getInt(offset); case LONG -> ((LongBlock) block).getLong(offset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 2da9cfeba09f0..01994af1cfc96 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -136,6 +136,11 @@ public void close() { static class Builder implements Block.Builder { private int positionCount; + /** + * Has this builder been closed already? 
+ */ + private boolean closed = false; + @Override public Builder appendNull() { positionCount++; @@ -174,7 +179,16 @@ public Block.Builder mvOrdering(MvOrdering mvOrdering) { @Override public Block build() { + if (closed) { + throw new IllegalStateException("already closed"); + } + close(); return new ConstantNullBlock(positionCount); } + + @Override + public void close() { + closed = true; + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index b21a956980f6a..6bcf913ce6240 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -82,8 +82,8 @@ public void close() { /** * A builder the for {@link DocBlock}. */ - public static Builder newBlockBuilder(int estimatedSize) { - return new Builder(estimatedSize); + public static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return new Builder(estimatedSize, blockFactory); } public static class Builder implements Block.Builder { @@ -91,10 +91,10 @@ public static class Builder implements Block.Builder { private final IntVector.Builder segments; private final IntVector.Builder docs; - private Builder(int estimatedSize) { - shards = IntVector.newVectorBuilder(estimatedSize); - segments = IntVector.newVectorBuilder(estimatedSize); - docs = IntVector.newVectorBuilder(estimatedSize); + private Builder(int estimatedSize, BlockFactory blockFactory) { + shards = IntVector.newVectorBuilder(estimatedSize, blockFactory); + segments = IntVector.newVectorBuilder(estimatedSize, blockFactory); + docs = IntVector.newVectorBuilder(estimatedSize, blockFactory); } public Builder appendShard(int shard) { @@ -153,5 +153,10 @@ public DocBlock build() { // Pass null for singleSegmentNonDecreasing so we calculate it when we first need it. 
return new DocVector(shards.build(), segments.build(), docs.build(), null).asBlock(); } + + @Override + public void close() { + Releasables.closeExpectNoException(shards, segments, docs); + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java index 0c85d433018e0..4467766a9e0ef 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java @@ -9,8 +9,6 @@ import org.apache.lucene.util.BytesRef; -import java.util.function.IntFunction; - /** * The type of elements in {@link Block} and {@link Vector} */ @@ -22,7 +20,7 @@ public enum ElementType { /** * Blocks containing only null values. */ - NULL(estimatedSize -> new ConstantNullBlock.Builder()), + NULL((estimatedSize, blockFactory) -> new ConstantNullBlock.Builder()), BYTES_REF(BytesRefBlock::newBlockBuilder), @@ -34,19 +32,32 @@ public enum ElementType { /** * Intermediate blocks which don't support retrieving elements. */ - UNKNOWN(estimatedSize -> { throw new UnsupportedOperationException("can't build null blocks"); }); + UNKNOWN((estimatedSize, blockFactory) -> { throw new UnsupportedOperationException("can't build null blocks"); }); + + interface BuilderSupplier { + Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory); + } - private final IntFunction builder; + private final BuilderSupplier builder; - ElementType(IntFunction builder) { + ElementType(BuilderSupplier builder) { this.builder = builder; } /** * Create a new {@link Block.Builder} for blocks of this type. 
+ * @deprecated use {@link #newBlockBuilder(int, BlockFactory)} */ + @Deprecated public Block.Builder newBlockBuilder(int estimatedSize) { - return builder.apply(estimatedSize); + return builder.newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); + } + + /** + * Create a new {@link Block.Builder} for blocks of this type. + */ + public Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return builder.newBlockBuilder(estimatedSize, blockFactory); } public static ElementType fromJava(Class type) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java index 873565592dfaf..a4c89422213b1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -69,9 +68,10 @@ private Page(boolean copyBlocks, int positionCount, Block[] blocks) { // assert assertPositionCount(blocks); this.positionCount = positionCount; this.blocks = copyBlocks ? 
blocks.clone() : blocks; - if (Assertions.ENABLED) { - for (Block b : blocks) { - assert b.getPositionCount() == positionCount : "expected positionCount=" + positionCount + " but was " + b; + for (Block b : blocks) { + assert b.getPositionCount() == positionCount : "expected positionCount=" + positionCount + " but was " + b; + if (b.isReleased()) { + throw new IllegalArgumentException("can't build page out of released blocks but [" + b + "] was released"); } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 171bdbd62f4d0..c9ecf1aa9e399 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -50,7 +50,11 @@ public interface Vector extends Accountable, Releasable { /** The block factory associated with this vector. */ BlockFactory blockFactory(); - interface Builder { + /** + * Builds {@link Vector}s. Typically, you use one of its direct subinterfaces like {@link IntVector.Builder}. + * This is {@link Releasable} and should be released after building the vector or if building the vector fails. + */ + interface Builder extends Releasable { /** * Builds the block. This method can be called multiple times.
*/ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 10ff868c09806..ddb0eced039be 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -93,6 +93,7 @@ $endif$ public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override @@ -137,7 +138,7 @@ $endif$ } released = true; $if(BytesRef)$ - blockFactory.adjustBreaker(-(ramBytesUsed() - values.ramBytesUsed()), true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); $else$ blockFactory.adjustBreaker(-ramBytesUsed(), true); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index b6a8714f882ee..3e6ccc2286675 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -110,7 +110,7 @@ $endif$ $if(BytesRef)$ @Override public void close() { - blockFactory.adjustBreaker(-BASE_RAM_BYTES_USED, true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } $endif$ diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 4d43f25577cc5..241dba127209b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -14,6 +14,8 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.core.Releasables; $else$ +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; $endif$ @@ -41,7 +43,7 @@ $else$ $Type$BlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new $type$[initialSize]; } $endif$ @@ -246,27 +248,59 @@ $endif$ public $Type$Block build() { finish(); $Type$Block block; - if (hasNonNullValue && positionCount == 1 && valueCount == 1) { $if(BytesRef)$ + assert estimatedBytes == 0 || firstValueIndexes != null; + if (hasNonNullValue && positionCount == 1 && valueCount == 1) { block = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
+ */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); Releasables.closeExpectNoException(values); } else { - estimatedBytes += values.ramBytesUsed(); + if (isDense() && singleValued()) { + block = new $Type$ArrayVector(values, positionCount, blockFactory).asBlock(); + } else { + block = new $Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); + } + values = null; $else$ - block = new Constant$Type$Vector(values[0], 1, blockFactory).asBlock(); + if (hasNonNullValue && positionCount == 1 && valueCount == 1) { + block = blockFactory.newConstant$Type$BlockWith(values[0], 1, estimatedBytes); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } -$endif$ if (isDense() && singleValued()) { - block = new $Type$ArrayVector(values, positionCount, blockFactory).asBlock(); + block = blockFactory.new$Type$ArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - block = new $Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + block = blockFactory.new$Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, estimatedBytes); } } - // update the breaker with the actual bytes used. 
- blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); +$endif$ + built(); return block; } +$if(BytesRef)$ + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } +$endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st index b813120b42e43..3241a372b7d54 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st @@ -83,24 +83,53 @@ $endif$ @Override public $Type$Vector build() { + finish(); $Type$Vector vector; - if (valueCount == 1) { $if(BytesRef)$ + assert estimatedBytes == 0; + if (valueCount == 1) { vector = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed(), false); Releasables.closeExpectNoException(values); } else { - estimatedBytes = values.ramBytesUsed(); + vector = new $Type$ArrayVector(values, valueCount, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. 
The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - values.bigArraysRamBytesUsed(), false); + } + values = null; $else$ - vector = new Constant$Type$Vector(values[0], 1, blockFactory); + if (valueCount == 1) { + vector = blockFactory.newConstant$Type$BlockWith(values[0], 1, estimatedBytes).asVector(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } -$endif$ - vector = new $Type$ArrayVector(values, valueCount, blockFactory); + vector = blockFactory.new$Type$ArrayVector(values, valueCount, estimatedBytes); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); +$endif$ + built(); return vector; } +$if(BytesRef)$ + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } +$endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st index 86bc6b0a095d6..6cbc9e663981a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st @@ -18,6 +18,7 @@ import org.apache.lucene.util.RamUsageEstimator; final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { private final BlockFactory blockFactory; private final $type$[] values; + private final long preAdjustedBytes; /** * The next value to write into. {@code -1} means the vector has already * been built. 
@@ -25,7 +26,8 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { private int nextIndex; $Type$VectorFixedBuilder(int size, BlockFactory blockFactory) { - blockFactory.adjustBreaker(ramBytesUsed(size), false); + preAdjustedBytes = ramBytesUsed(size); + blockFactory.adjustBreaker(preAdjustedBytes, false); this.blockFactory = blockFactory; this.values = new $type$[size]; } @@ -54,8 +56,16 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { } nextIndex = -1; if (values.length == 1) { - return new Constant$Type$Vector(values[0], 1, blockFactory); + return blockFactory.newConstant$Type$BlockWith(values[0], 1, preAdjustedBytes).asVector(); + } + return blockFactory.new$Type$ArrayVector(values, values.length, preAdjustedBytes); + } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-preAdjustedBytes, false); } - return new $Type$ArrayVector(values, values.length, blockFactory); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java index 4290075b05ae8..28a9359497393 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java @@ -142,7 +142,7 @@ private static class LongSingletonValuesReader extends BlockDocValuesReader { @Override public LongBlock.Builder builder(int positionCount) { - return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -197,7 +197,7 @@ private static class LongValuesReader extends BlockDocValuesReader { @Override public 
LongBlock.Builder builder(int positionCount) { - return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -259,7 +259,7 @@ private static class IntSingletonValuesReader extends BlockDocValuesReader { @Override public IntBlock.Builder builder(int positionCount) { - return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -314,7 +314,7 @@ private static class IntValuesReader extends BlockDocValuesReader { @Override public IntBlock.Builder builder(int positionCount) { - return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -378,7 +378,7 @@ private static class DoubleSingletonValuesReader extends BlockDocValuesReader { @Override public DoubleBlock.Builder builder(int positionCount) { - return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -435,7 +435,7 @@ private static class DoubleValuesReader extends BlockDocValuesReader { @Override public DoubleBlock.Builder builder(int positionCount) { - return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -497,7 +497,7 @@ private static class BytesValuesReader extends BlockDocValuesReader { @Override public BytesRefBlock.Builder builder(int positionCount) { - return 
BytesRefBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return BytesRefBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -558,7 +558,7 @@ private static class BooleanSingletonValuesReader extends BlockDocValuesReader { @Override public BooleanBlock.Builder builder(int positionCount) { - return BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override @@ -613,7 +613,7 @@ private static class BooleanValuesReader extends BlockDocValuesReader { @Override public BooleanBlock.Builder builder(int positionCount) { - return BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.ASCENDING); + return BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockOrdinalsReader.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockOrdinalsReader.java deleted file mode 100644 index 6a2625bf53845..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockOrdinalsReader.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.SortedSetDocValues; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; - -import java.io.IOException; - -public final class BlockOrdinalsReader { - private final SortedSetDocValues sortedSetDocValues; - private final Thread creationThread; - - public BlockOrdinalsReader(SortedSetDocValues sortedSetDocValues) { - this.sortedSetDocValues = sortedSetDocValues; - this.creationThread = Thread.currentThread(); - } - - public IntBlock readOrdinals(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); - for (int p = 0; p < positionCount; p++) { - int doc = docs.getInt(p); - if (false == sortedSetDocValues.advanceExact(doc)) { - builder.appendNull(); - continue; - } - int count = sortedSetDocValues.docValueCount(); - // TODO don't come this way if there are a zillion ords on the field - if (count == 1) { - builder.appendInt(Math.toIntExact(sortedSetDocValues.nextOrd())); - continue; - } - builder.beginPositionEntry(); - for (int i = 0; i < count; i++) { - builder.appendInt(Math.toIntExact(sortedSetDocValues.nextOrd())); - } - builder.endPositionEntry(); - } - return builder.build(); - } - - public int docID() { - return sortedSetDocValues.docID(); - } - - /** - * Checks if the reader can be used to read a range documents starting with the given docID by the current thread. 
- */ - public static boolean canReuse(BlockOrdinalsReader reader, int startingDocID) { - return reader != null && reader.creationThread == Thread.currentThread() && reader.docID() <= startingDocID; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java new file mode 100644 index 0000000000000..e1e5b11c5b8c7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java @@ -0,0 +1,163 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene; + +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorable; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Weight; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.List; +import java.util.function.Function; + +/** + * Source operator that incrementally counts the results in Lucene searches + * Returns always one entry that mimics the Count aggregation internal state: + * 1. the count as a long (0 if no doc is seen) + * 2. 
a bool flag (seen) that's always true meaning that the group (all items) always exists + */ +public class LuceneCountOperator extends LuceneOperator { + + private static final int PAGE_SIZE = 1; + + private int totalHits = 0; + private int remainingDocs; + + private final LeafCollector leafCollector; + + public static class Factory implements LuceneOperator.Factory { + private final DataPartitioning dataPartitioning; + private final int taskConcurrency; + private final int limit; + private final LuceneSliceQueue sliceQueue; + + public Factory( + List searchContexts, + Function queryFunction, + DataPartitioning dataPartitioning, + int taskConcurrency, + int limit + ) { + this.limit = limit; + this.dataPartitioning = dataPartitioning; + var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); + this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency); + this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); + } + + @Override + public SourceOperator get(DriverContext driverContext) { + return new LuceneCountOperator(sliceQueue, limit); + } + + @Override + public int taskConcurrency() { + return taskConcurrency; + } + + public int limit() { + return limit; + } + + @Override + public String describe() { + return "LuceneCountOperator[dataPartitioning = " + dataPartitioning + ", limit = " + limit + "]"; + } + } + + public LuceneCountOperator(LuceneSliceQueue sliceQueue, int limit) { + super(PAGE_SIZE, sliceQueue); + this.remainingDocs = limit; + this.leafCollector = new LeafCollector() { + @Override + public void setScorer(Scorable scorer) {} + + @Override + public void collect(int doc) { + if (remainingDocs > 0) { + remainingDocs--; + totalHits++; + } + } + }; + } + + @Override + public boolean isFinished() { + return doneCollecting || remainingDocs == 0; + } + + @Override + public void finish() { + doneCollecting = true; + } + + @Override + public Page getOutput() { + if 
(isFinished()) { + assert remainingDocs <= 0 : remainingDocs; + return null; + } + try { + final LuceneScorer scorer = getCurrentOrLoadNextScorer(); + // no scorer means no more docs + if (scorer == null) { + remainingDocs = 0; + } else { + Weight weight = scorer.weight(); + var leafReaderContext = scorer.leafReaderContext(); + // see org.apache.lucene.search.TotalHitCountCollector + int leafCount = weight == null ? -1 : weight.count(leafReaderContext); + if (leafCount != -1) { + // make sure to NOT multi count as the count _shortcut_ (which is segment wide) + // handle doc partitioning where the same leaf can be seen multiple times + // since the count is global, consider it only for the first partition and skip the rest + // SHARD, SEGMENT and the first DOC_ reader in data partitioning contain the first doc (position 0) + if (scorer.position() == 0) { + // check to not count over the desired number of docs/limit + var count = Math.min(leafCount, remainingDocs); + totalHits += count; + remainingDocs -= count; + scorer.markAsDone(); + } + } else { + // could not apply shortcut, trigger the search + scorer.scoreNextRange(leafCollector, leafReaderContext.reader().getLiveDocs(), remainingDocs); + } + } + + Page page = null; + // emit only one page + if (remainingDocs <= 0 && pagesEmitted == 0) { + pagesEmitted++; + page = new Page( + PAGE_SIZE, + LongBlock.newConstantBlockWith(totalHits, PAGE_SIZE), + BooleanBlock.newConstantBlockWith(true, PAGE_SIZE) + ); + } + return page; + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + @Override + protected void describe(StringBuilder sb) { + sb.append(", remainingDocs=").append(remainingDocs); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index ec1e13d033a8b..74baecf154fec 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -22,6 +22,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -31,6 +33,7 @@ import java.util.function.Function; public abstract class LuceneOperator extends SourceOperator { + private static final Logger logger = LogManager.getLogger(LuceneOperator.class); public static final int NO_LIMIT = Integer.MAX_VALUE; @@ -56,9 +59,7 @@ public interface Factory extends SourceOperator.SourceOperatorFactory { } @Override - public void close() { - - } + public void close() {} LuceneScorer getCurrentOrLoadNextScorer() { while (currentScorer == null || currentScorer.isDone()) { @@ -76,14 +77,15 @@ LuceneScorer getCurrentOrLoadNextScorer() { } } final PartialLeafReaderContext partialLeaf = currentSlice.getLeaf(sliceIndex++); - final LeafReaderContext leaf = partialLeaf.leafReaderContext; + logger.trace("Starting {}", partialLeaf); + final LeafReaderContext leaf = partialLeaf.leafReaderContext(); if (currentScorer == null || currentScorer.leafReaderContext() != leaf) { final Weight weight = currentSlice.weight().get(); currentScorer = new LuceneScorer(currentSlice.shardIndex(), currentSlice.searchContext(), weight, leaf); } - assert currentScorer.maxPosition <= partialLeaf.maxDoc : currentScorer.maxPosition + ">" + partialLeaf.maxDoc; - currentScorer.maxPosition = partialLeaf.maxDoc; - currentScorer.position = Math.max(currentScorer.position, partialLeaf.minDoc); + assert currentScorer.maxPosition <= partialLeaf.maxDoc() : currentScorer.maxPosition + ">" + 
partialLeaf.maxDoc(); + currentScorer.maxPosition = partialLeaf.maxDoc(); + currentScorer.position = Math.max(currentScorer.position, partialLeaf.minDoc()); } if (Thread.currentThread() != currentScorer.executingThread) { currentScorer.reinitialize(); @@ -146,6 +148,14 @@ int shardIndex() { SearchContext searchContext() { return searchContext; } + + Weight weight() { + return weight; + } + + int position() { + return position; + } } @Override @@ -175,49 +185,62 @@ public static class Status implements Operator.Status { private final int processedSlices; private final int totalSlices; private final int pagesEmitted; - private final int slicePosition; - private final int sliceSize; + private final int sliceIndex; + private final int sliceMin; + private final int sliceMax; + private final int current; private Status(LuceneOperator operator) { processedSlices = operator.processSlices; + sliceIndex = operator.sliceIndex; totalSlices = operator.sliceQueue.totalSlices(); LuceneSlice slice = operator.currentSlice; - final PartialLeafReaderContext leaf; - int sliceIndex = operator.sliceIndex; if (slice != null && sliceIndex < slice.numLeaves()) { - leaf = slice.getLeaf(sliceIndex); + PartialLeafReaderContext leaf = slice.getLeaf(sliceIndex); + sliceMin = leaf.minDoc(); + sliceMax = leaf.maxDoc(); } else { - leaf = null; + sliceMin = 0; + sliceMax = 0; } LuceneScorer scorer = operator.currentScorer; - slicePosition = scorer != null ? scorer.position : 0; - sliceSize = leaf != null ? 
leaf.maxDoc - leaf.minDoc : 0; + if (scorer == null) { + current = 0; + } else { + current = scorer.position; + } pagesEmitted = operator.pagesEmitted; } - Status(int processedSlices, int totalSlices, int pagesEmitted, int slicePosition, int sliceSize) { + Status(int processedSlices, int sliceIndex, int totalSlices, int pagesEmitted, int sliceMin, int sliceMax, int current) { this.processedSlices = processedSlices; + this.sliceIndex = sliceIndex; this.totalSlices = totalSlices; - this.slicePosition = slicePosition; - this.sliceSize = sliceSize; this.pagesEmitted = pagesEmitted; + this.sliceMin = sliceMin; + this.sliceMax = sliceMax; + this.current = current; } Status(StreamInput in) throws IOException { processedSlices = in.readVInt(); + sliceIndex = in.readVInt(); totalSlices = in.readVInt(); - slicePosition = in.readVInt(); - sliceSize = in.readVInt(); pagesEmitted = in.readVInt(); + sliceMin = in.readVInt(); + sliceMax = in.readVInt(); + current = in.readVInt(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(processedSlices); + out.writeVInt(sliceIndex); out.writeVInt(totalSlices); - out.writeVInt(slicePosition); - out.writeVInt(sliceSize); out.writeVInt(pagesEmitted); + out.writeVInt(sliceMin); + out.writeVInt(sliceMax); + out.writeVInt(current); } @Override @@ -225,11 +248,15 @@ public String getWriteableName() { return ENTRY.name; } - public int currentLeaf() { + public int processedSlices() { return processedSlices; } - public int totalLeaves() { + public int sliceIndex() { + return sliceIndex; + } + + public int totalSlices() { return totalSlices; } @@ -237,22 +264,28 @@ public int pagesEmitted() { return pagesEmitted; } - public int slicePosition() { - return slicePosition; + public int sliceMin() { + return sliceMin; + } + + public int sliceMax() { + return sliceMax; } - public int sliceSize() { - return sliceSize; + public int current() { + return current; } @Override public XContentBuilder toXContent(XContentBuilder 
builder, Params params) throws IOException { builder.startObject(); - builder.field("processed_sliced", processedSlices); + builder.field("processed_slices", processedSlices); + builder.field("slice_index", sliceIndex); builder.field("total_slices", totalSlices); - builder.field("slice_position", slicePosition); - builder.field("slice_size", sliceSize); builder.field("pages_emitted", pagesEmitted); + builder.field("slice_min", sliceMin); + builder.field("slice_max", sliceMax); + builder.field("current", current); return builder.endObject(); } @@ -262,15 +295,17 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; return processedSlices == status.processedSlices + && sliceIndex == status.sliceIndex && totalSlices == status.totalSlices && pagesEmitted == status.pagesEmitted - && slicePosition == status.slicePosition - && sliceSize == status.sliceSize; + && sliceMin == status.sliceMin + && sliceMax == status.sliceMax + && current == status.current; } @Override public int hashCode() { - return Objects.hash(processedSlices, totalSlices, pagesEmitted, slicePosition, sliceSize); + return Objects.hash(processedSlices, sliceIndex, totalSlices, pagesEmitted, sliceMin, sliceMax, current); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index 7d96416649636..faf3d6437282a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -117,7 +117,7 @@ static List> docSlices(IndexReader indexReader, i } if (slices.stream() .flatMapToInt( - l -> l.stream().mapToInt(partialLeafReaderContext -> partialLeafReaderContext.maxDoc - partialLeafReaderContext.minDoc) + l -> 
l.stream().mapToInt(partialLeafReaderContext -> partialLeafReaderContext.maxDoc() - partialLeafReaderContext.minDoc()) ) .sum() != totalDocCount) { throw new IllegalStateException("wrong doc count"); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java index 964827a41516e..e9063c9597c5f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java @@ -9,18 +9,13 @@ import org.apache.lucene.index.LeafReaderContext; -public final class PartialLeafReaderContext { - - final LeafReaderContext leafReaderContext; - final int minDoc; // incl - final int maxDoc; // excl - - public PartialLeafReaderContext(LeafReaderContext leafReaderContext, int minDoc, int maxDoc) { - this.leafReaderContext = leafReaderContext; - this.minDoc = minDoc; - this.maxDoc = maxDoc; - } - +/** + * A subset of a {@link LeafReaderContext}. 
+ * @param leafReaderContext the context to subset + * @param minDoc the first document + * @param maxDoc one more than the last document + */ +public record PartialLeafReaderContext(LeafReaderContext leafReaderContext, int minDoc, int maxDoc) { public PartialLeafReaderContext(LeafReaderContext leafReaderContext) { this(leafReaderContext, 0, leafReaderContext.reader().maxDoc()); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java index 76f9250f24fb4..ca4dbccb5b442 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -79,7 +80,11 @@ protected Status status(int pagesProcessed) { } @Override - public void close() {} + public void close() { + if (prev != null) { + Releasables.closeExpectNoException(() -> prev.releaseBlocks()); + } + } public static class Status implements Operator.Status { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/BatchEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/BatchEncoder.java index a0a88c0d8e0b9..f530cdc3fa124 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/BatchEncoder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/BatchEncoder.java 
@@ -53,86 +53,25 @@ public static Decoder decoder(ElementType elementType) { }; } - private static long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(BatchEncoder.class); - - /** - * Buffer into which we encode values. - */ - protected final BytesRefBuilder bytes = new BytesRefBuilder(); - - /** - * Count of values at each position. - */ - private int[] counts = new int[ArrayUtil.oversize(10, Integer.BYTES)]; - - /** - * Offsets into the {@link #bytes} for each value. - */ - private int[] valueOffsets = new int[ArrayUtil.oversize(10, Integer.BYTES)]; - - /** - * The first position in the current batch. - */ - private int firstPosition; - - /** - * The number of positions in the current batch. It's also the maximum index into - * {@link #counts} that has an meaning. - */ - private int positionCount; - - /** - * The value being encoded right now. - */ - private int currentValue; - - /** - * Build the encoder. - * @param batchSize The number of bytes in a batch. We'll allocate this much memory for the - * encoder and only expand the allocation if the first entry in a batch - * doesn't fit into the buffer. - */ - BatchEncoder(int batchSize) { - bytes.grow(batchSize); - } - - /** - * The first position in the current batch. - */ - public int firstPosition() { - return firstPosition; - } - /** * The number of positions in the current batch. */ - public int positionCount() { - return positionCount; - } + public abstract int positionCount(); /** * The number of values at the position with this offset in the batch. - * The actual position in the block we're encoding is {@code positionOffset + firstPosition()}. */ - public int valueCount(int positionOffset) { - if (positionOffset >= positionCount) { - throw new IllegalArgumentException("wanted " + positionOffset + " but only have " + positionCount); - } - return counts[positionOffset]; - } + public abstract int valueCount(int positionOffset); /** - * Read the value at the specified index. 
Values at the first position - * start at index {@code 0} and advance one per value. So the values - * at position n start at {@code (0..n-1).sum(valueCount)}. There is + * Read the value at the specified index then append to the {@code dst}. + * Values at the first position start at index {@code 0} and advance one per value. + * So the values at position n start at {@code (0..n-1).sum(valueCount)}. There is * no random-access way to get the first index for a position. + * + * @return the number of bytes has read */ - public final BytesRef read(int index, BytesRef scratch) { - scratch.bytes = bytes.bytes(); - scratch.offset = valueOffsets[index]; - scratch.length = valueOffsets[index + 1] - scratch.offset; - return scratch; - } + public abstract int read(int index, BytesRefBuilder dst); /** * Encodes the next batch of entries. This will encode values until the next @@ -144,81 +83,227 @@ public final BytesRef read(int index, BytesRef scratch) { * expand and encode that entry. *

*/ - public final void encodeNextBatch() { - bytes.clear(); - firstPosition += positionCount; - positionCount = 0; - currentValue = 0; - readNextBatch(); - } + public abstract void encodeNextBatch(); - @Override - public long ramBytesUsed() { - return SHALLOW_SIZE + RamUsageEstimator.sizeOf(counts) + RamUsageEstimator.sizeOf(valueOffsets); - } + protected abstract static class MVEncoder extends BatchEncoder { + private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(MVEncoder.class); - /** - * Encodes the next batch of values. See {@link #encodeNextBatch()}. - */ - protected abstract void readNextBatch(); + /** + * Buffer into which we encode values. + */ + protected final BytesRefBuilder bytes = new BytesRefBuilder(); - /** - * Implementations of {@link #readNextBatch} should call this before any - * values at the current position. - */ - protected final void startPosition() { - counts = ArrayUtil.grow(counts, positionCount + 1); - counts[positionCount] = 0; - } + /** + * Count of values at each position. + */ + private int[] counts = new int[ArrayUtil.oversize(10, Integer.BYTES)]; - /** - * Implementations of {@link #readNextBatch} should call this before adding - * each value to the current position to mark its start. - */ - protected final void addingValue() { - counts[positionCount]++; - valueOffsets = ArrayUtil.grow(valueOffsets, currentValue + 1); - valueOffsets[currentValue++] = bytes.length(); - } + /** + * Offsets into the {@link #bytes} for each value. + */ + private int[] valueOffsets = new int[ArrayUtil.oversize(10, Integer.BYTES)]; - /** - * Implementations of {@link #readNextBatch} should call this to end - * the current position. - */ - protected final void endPosition() { - valueOffsets = ArrayUtil.grow(valueOffsets, currentValue + 1); - valueOffsets[currentValue] = bytes.length(); - positionCount++; - } + /** + * The first position in the current batch. 
+ */ + private int firstPosition; - /** - * Implementations of {@link #readNextBatch} should call this to encode - * an entirely null position. - */ - protected final void encodeNull() { - startPosition(); - addingValue(); - endPosition(); - } + /** + * The number of positions in the current batch. It's also the maximum index into + * {@link #counts} that has an meaning. + */ + private int positionCount; - /** - * The number of bytes in all entries in the batch. - */ - final int bytesLength() { - return bytes.length(); + /** + * The value being encoded right now. + */ + private int currentValue; + + /** + * Build the encoder. + * @param batchSize The number of bytes in a batch. We'll allocate this much memory for the + * encoder and only expand the allocation if the first entry in a batch + * doesn't fit into the buffer. + */ + MVEncoder(int batchSize) { + bytes.grow(batchSize); + } + + /** + * The first position in the current batch. + */ + protected final int firstPosition() { + return firstPosition; + } + + /** + * The number of positions in the current batch. + */ + @Override + public final int positionCount() { + return positionCount; + } + + /** + * The number of values at the position with this offset in the batch. + * The actual position in the block we're encoding is {@code positionOffset + firstPosition()}. + */ + @Override + public final int valueCount(int positionOffset) { + if (positionOffset >= positionCount) { + throw new IllegalArgumentException("wanted " + positionOffset + " but only have " + positionCount); + } + return counts[positionOffset]; + } + + /** + * Read the value at the specified index. Values at the first position + * start at index {@code 0} and advance one per value. So the values + * at position n start at {@code (0..n-1).sum(valueCount)}. There is + * no random-access way to get the first index for a position. 
+ */ + @Override + public final int read(int index, BytesRefBuilder dst) { + int start = valueOffsets[index]; + int length = valueOffsets[index + 1] - start; + if (length > 0) { + dst.append(bytes.bytes(), start, length); + } + return length; + } + + /** + * Encodes the next batch of entries. This will encode values until the next + * value doesn't fit into the buffer. Callers should iterate on the values + * that have been encoded and then call this again for the next batch. + *

+ * It's possible for this batch to be empty if there isn't room for the + * first entry in the buffer. If so, call again to force the buffer to + * expand and encode that entry. + *

+ */ + @Override + public final void encodeNextBatch() { + bytes.clear(); + firstPosition += positionCount; + positionCount = 0; + currentValue = 0; + readNextBatch(); + } + + @Override + public long ramBytesUsed() { + return SHALLOW_SIZE + RamUsageEstimator.sizeOf(counts) + RamUsageEstimator.sizeOf(valueOffsets); + } + + /** + * Encodes the next batch of values. See {@link #encodeNextBatch()}. + */ + protected abstract void readNextBatch(); + + /** + * Implementations of {@link #readNextBatch} should call this before any + * values at the current position. + */ + protected final void startPosition() { + counts = ArrayUtil.grow(counts, positionCount + 1); + counts[positionCount] = 0; + } + + /** + * Implementations of {@link #readNextBatch} should call this before adding + * each value to the current position to mark its start. + */ + protected final void addingValue() { + counts[positionCount]++; + valueOffsets = ArrayUtil.grow(valueOffsets, currentValue + 1); + valueOffsets[currentValue++] = bytes.length(); + } + + /** + * Implementations of {@link #readNextBatch} should call this to end + * the current position. + */ + protected final void endPosition() { + valueOffsets = ArrayUtil.grow(valueOffsets, currentValue + 1); + valueOffsets[currentValue] = bytes.length(); + positionCount++; + } + + /** + * Implementations of {@link #readNextBatch} should call this to encode + * an entirely null position. + */ + protected final void encodeNull() { + startPosition(); + addingValue(); + endPosition(); + } + + /** + * The number of bytes in all entries in the batch. + */ + final int bytesLength() { + return bytes.length(); + } + + /** + * The maximum batch size. This starts the same as the constructor parameter + * but will grow if a single entry doesn't fit into the batch. + */ + final int bytesCapacity() { + return bytes.bytes().length; + } } - /** - * The maximum batch size. 
This starts the same as the constructor parameter - * but will grow if a single entry doesn't fit into the batch. - */ - final int bytesCapacity() { - return bytes.bytes().length; + protected abstract static class DirectEncoder extends BatchEncoder { + protected static final long BASE_RAM_USAGE = RamUsageEstimator.shallowSizeOfInstance(DirectEncoder.class); + protected final Block block; + private int blockPosition = -1; + private int valueCount; + + DirectEncoder(Block block) { + this.block = block; + } + + @Override + public final void encodeNextBatch() { + valueCount = block.getValueCount(++blockPosition); + } + + @Override + public final int positionCount() { + return Math.max(valueCount, 1); + } + + @Override + public final int valueCount(int positionOffset) { + assert positionOffset == 0 : positionOffset; + return positionCount(); + } + + @Override + public int read(int index, BytesRefBuilder dst) { + if (valueCount == 0) { + assert index == 0 : index; + return 0; + } else { + assert index < valueCount : index + " > " + valueCount; + return readValueAtBlockIndex(block.getFirstValueIndex(blockPosition) + index, dst); + } + } + + protected abstract int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst); + + @Override + public final long ramBytesUsed() { + return BASE_RAM_USAGE; + } } private static final VarHandle intHandle = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.nativeOrder()); - protected abstract static class Ints extends BatchEncoder { + protected abstract static class Ints extends MVEncoder { protected Ints(int batchSize) { super(batchSize); } @@ -267,9 +352,26 @@ public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int } } + protected static final class DirectInts extends DirectEncoder { + DirectInts(IntBlock block) { + super(block); + } + + @Override + protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) { + int before = dst.length(); + int after = before + Integer.BYTES; + 
dst.grow(after); + int v = ((IntBlock) block).getInt(valueIndex); + intHandle.set(dst.bytes(), before, v); + dst.setLength(after); + return Integer.BYTES; + } + } + private static final VarHandle longHandle = MethodHandles.byteArrayViewVarHandle(long[].class, ByteOrder.nativeOrder()); - protected abstract static class Longs extends BatchEncoder { + protected abstract static class Longs extends MVEncoder { protected Longs(int batchSize) { super(batchSize); } @@ -300,6 +402,23 @@ protected final void encode(long v) { } } + protected static final class DirectLongs extends DirectEncoder { + DirectLongs(LongBlock block) { + super(block); + } + + @Override + protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) { + int before = dst.length(); + int after = before + Long.BYTES; + dst.grow(after); + long v = ((LongBlock) block).getLong(valueIndex); + longHandle.set(dst.bytes(), before, v); + dst.setLength(after); + return Long.BYTES; + } + } + private static class LongsDecoder implements Decoder { @Override public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) { @@ -320,7 +439,7 @@ public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int private static final VarHandle doubleHandle = MethodHandles.byteArrayViewVarHandle(double[].class, ByteOrder.nativeOrder()); - protected abstract static class Doubles extends BatchEncoder { + protected abstract static class Doubles extends MVEncoder { protected Doubles(int batchSize) { super(batchSize); } @@ -351,6 +470,23 @@ protected final void encode(double v) { } } + protected static final class DirectDoubles extends DirectEncoder { + DirectDoubles(DoubleBlock block) { + super(block); + } + + @Override + protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) { + int before = dst.length(); + int after = before + Double.BYTES; + dst.grow(after); + double v = ((DoubleBlock) block).getDouble(valueIndex); + doubleHandle.set(dst.bytes(), before, v); 
+ dst.setLength(after); + return Double.BYTES; + } + } + private static class DoublesDecoder implements Decoder { @Override public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) { @@ -368,7 +504,7 @@ public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int } } - protected abstract static class Booleans extends BatchEncoder { + protected abstract static class Booleans extends MVEncoder { protected Booleans(int batchSize) { super(batchSize); } @@ -396,6 +532,19 @@ protected final void encode(boolean v) { } } + protected static final class DirectBooleans extends DirectEncoder { + DirectBooleans(BooleanBlock block) { + super(block); + } + + @Override + protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) { + var v = ((BooleanBlock) block).getBoolean(valueIndex); + dst.append((byte) (v ? 1 : 0)); + return 1; + } + } + private static class BooleansDecoder implements Decoder { @Override public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) { @@ -413,7 +562,7 @@ public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int } } - protected abstract static class BytesRefs extends BatchEncoder { + protected abstract static class BytesRefs extends MVEncoder { protected BytesRefs(int batchSize) { super(batchSize); } @@ -448,6 +597,25 @@ protected final void encode(BytesRef v) { } } + protected static final class DirectBytesRefs extends DirectEncoder { + private final BytesRef scratch = new BytesRef(); + + DirectBytesRefs(BytesRefBlock block) { + super(block); + } + + @Override + protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) { + var v = ((BytesRefBlock) block).getBytesRef(valueIndex, scratch); + int start = dst.length(); + dst.grow(start + Integer.BYTES + v.length); + intHandle.set(dst.bytes(), start, v.length); + dst.setLength(start + Integer.BYTES); + dst.append(v); + return Integer.BYTES + v.length; + } + } + private 
static class BytesRefsDecoder implements Decoder { @Override public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnExtractOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnExtractOperator.java index f1619323686db..0ccf575fc030d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnExtractOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnExtractOperator.java @@ -59,23 +59,25 @@ protected Page process(Page page) { blockBuilders[i] = types[i].newBlockBuilder(rowsCount); } - BytesRefBlock input = (BytesRefBlock) inputEvaluator.eval(page); - BytesRef spare = new BytesRef(); - for (int row = 0; row < rowsCount; row++) { - if (input.isNull(row)) { - for (int i = 0; i < blockBuilders.length; i++) { - blockBuilders[i].appendNull(); + try (Block.Ref ref = inputEvaluator.eval(page)) { + BytesRefBlock input = (BytesRefBlock) ref.block(); + BytesRef spare = new BytesRef(); + for (int row = 0; row < rowsCount; row++) { + if (input.isNull(row)) { + for (int i = 0; i < blockBuilders.length; i++) { + blockBuilders[i].appendNull(); + } + continue; } - continue; + evaluator.computeRow(input, row, blockBuilders, spare); } - evaluator.computeRow(input, row, blockBuilders, spare); - } - Block[] blocks = new Block[blockBuilders.length]; - for (int i = 0; i < blockBuilders.length; i++) { - blocks[i] = blockBuilders[i].build(); + Block[] blocks = new Block[blockBuilders.length]; + for (int i = 0; i < blockBuilders.length; i++) { + blocks[i] = blockBuilders[i].build(); + } + return page.appendBlocks(blocks); } - return page.appendBlocks(blocks); } @Override @@ -94,6 +96,6 @@ public interface Evaluator { @Override public void close() { - Releasables.closeExpectNoException(inputEvaluator); + 
Releasables.closeExpectNoException(inputEvaluator, super::close); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 281693a487255..1a1604406892c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -181,7 +181,13 @@ private SubscribableListener runSingleLoopIteration() { if (op.isFinished() == false && nextOp.needsInput()) { Page page = op.getOutput(); - if (page != null && page.getPositionCount() != 0) { + if (page == null) { + // No result, just move to the next iteration + } else if (page.getPositionCount() == 0) { + // Empty result, release any memory it holds immediately and move to the next iteration + page.releaseBlocks(); + } else { + // Non-empty result from the previous operation, move it to the next operation nextOp.addInput(page); movedPage = true; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java index aecbf07270a21..507ea5cdb9761 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java @@ -13,8 +13,6 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import java.util.stream.IntStream; - /** * Evaluates a tree of functions for every position in the block, resulting in a * new block which is appended to the page. 
@@ -43,8 +41,8 @@ public EvalOperator(ExpressionEvaluator evaluator) { @Override protected Page process(Page page) { - Block block = evaluator.eval(page); - block = maybeCopyBlock(page, block); + Block.Ref ref = evaluator.eval(page); + Block block = ref.floating() ? ref.block() : BlockUtils.deepCopyOf(ref.block()); return page.appendBlock(block); } @@ -55,17 +53,7 @@ public String toString() { @Override public void close() { - Releasables.closeExpectNoException(evaluator); - } - - /** Returns a copy of the give block, if the block appears in the page. */ - // TODO: this is a catch all, can be removed when we validate that evaluators always return copies - // for now it just looks like Attributes returns a reference? - static Block maybeCopyBlock(Page page, Block block) { - if (IntStream.range(0, page.getBlockCount()).mapToObj(page::getBlock).anyMatch(b -> b == block)) { - return BlockUtils.deepCopyOf(block); - } - return block; + Releasables.closeExpectNoException(evaluator, super::close); } /** @@ -80,13 +68,13 @@ interface Factory { /** * Evaluate the expression. 
*/ - Block eval(Page page); + Block.Ref eval(Page page); } public static final ExpressionEvaluator CONSTANT_NULL = new ExpressionEvaluator() { @Override - public Block eval(Page page) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java index ec12bcac0e60f..db8e0c4708e2c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java @@ -41,39 +41,41 @@ protected Page process(Page page) { int rowCount = 0; int[] positions = new int[page.getPositionCount()]; - Block uncastTest = evaluator.eval(page); - if (uncastTest.areAllValuesNull()) { - // All results are null which is like false. No values selected. - return null; - } - BooleanBlock test = (BooleanBlock) uncastTest; - // TODO we can detect constant true or false from the type - // TODO or we could make a new method in bool-valued evaluators that returns a list of numbers - for (int p = 0; p < page.getPositionCount(); p++) { - if (test.isNull(p) || test.getValueCount(p) != 1) { - // Null is like false - // And, for now, multivalued results are like false too - continue; + try (Block.Ref ref = evaluator.eval(page)) { + if (ref.block().areAllValuesNull()) { + // All results are null which is like false. No values selected. 
+ return null; } - if (test.getBoolean(test.getFirstValueIndex(p))) { - positions[rowCount++] = p; + BooleanBlock test = (BooleanBlock) ref.block(); + // TODO we can detect constant true or false from the type + // TODO or we could make a new method in bool-valued evaluators that returns a list of numbers + for (int p = 0; p < page.getPositionCount(); p++) { + if (test.isNull(p) || test.getValueCount(p) != 1) { + // Null is like false + // And, for now, multivalued results are like false too + continue; + } + if (test.getBoolean(test.getFirstValueIndex(p))) { + positions[rowCount++] = p; + } } - } - if (rowCount == 0) { - return null; - } - if (rowCount == page.getPositionCount()) { - return page; - } - positions = Arrays.copyOf(positions, rowCount); + if (rowCount == 0) { + page.releaseBlocks(); + return null; + } + if (rowCount == page.getPositionCount()) { + return page; + } + positions = Arrays.copyOf(positions, rowCount); - Block[] filteredBlocks = new Block[page.getBlockCount()]; - for (int i = 0; i < page.getBlockCount(); i++) { - filteredBlocks[i] = page.getBlock(i).filter(positions); - } + Block[] filteredBlocks = new Block[page.getBlockCount()]; + for (int i = 0; i < page.getBlockCount(); i++) { + filteredBlocks[i] = page.getBlock(i).filter(positions); + } - return new Page(filteredBlocks); + return new Page(filteredBlocks); + } } @Override @@ -83,6 +85,6 @@ public String toString() { @Override public void close() { - Releasables.closeExpectNoException(evaluator); + Releasables.closeExpectNoException(evaluator, super::close); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java index a6fc7484bbb9e..902fa90a502ca 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java @@ -25,13 +25,13 @@ public final class MultivalueDedupe { * Remove duplicate values from each position and write the results to a * {@link Block} using an adaptive algorithm based on the size of the input list. */ - public static Block dedupeToBlockAdaptive(Block block) { - return switch (block.elementType()) { - case BOOLEAN -> new MultivalueDedupeBoolean((BooleanBlock) block).dedupeToBlock(); - case BYTES_REF -> new MultivalueDedupeBytesRef((BytesRefBlock) block).dedupeToBlockAdaptive(); - case INT -> new MultivalueDedupeInt((IntBlock) block).dedupeToBlockAdaptive(); - case LONG -> new MultivalueDedupeLong((LongBlock) block).dedupeToBlockAdaptive(); - case DOUBLE -> new MultivalueDedupeDouble((DoubleBlock) block).dedupeToBlockAdaptive(); + public static Block.Ref dedupeToBlockAdaptive(Block.Ref ref) { + return switch (ref.block().elementType()) { + case BOOLEAN -> new MultivalueDedupeBoolean(ref).dedupeToBlock(); + case BYTES_REF -> new MultivalueDedupeBytesRef(ref).dedupeToBlockAdaptive(); + case INT -> new MultivalueDedupeInt(ref).dedupeToBlockAdaptive(); + case LONG -> new MultivalueDedupeLong(ref).dedupeToBlockAdaptive(); + case DOUBLE -> new MultivalueDedupeDouble(ref).dedupeToBlockAdaptive(); default -> throw new IllegalArgumentException(); }; } @@ -42,13 +42,13 @@ public static Block dedupeToBlockAdaptive(Block block) { * case complexity for larger. Prefer {@link #dedupeToBlockAdaptive} * which picks based on the number of elements at each position. 
*/ - public static Block dedupeToBlockUsingCopyMissing(Block block) { - return switch (block.elementType()) { - case BOOLEAN -> new MultivalueDedupeBoolean((BooleanBlock) block).dedupeToBlock(); - case BYTES_REF -> new MultivalueDedupeBytesRef((BytesRefBlock) block).dedupeToBlockUsingCopyMissing(); - case INT -> new MultivalueDedupeInt((IntBlock) block).dedupeToBlockUsingCopyMissing(); - case LONG -> new MultivalueDedupeLong((LongBlock) block).dedupeToBlockUsingCopyMissing(); - case DOUBLE -> new MultivalueDedupeDouble((DoubleBlock) block).dedupeToBlockUsingCopyMissing(); + public static Block.Ref dedupeToBlockUsingCopyMissing(Block.Ref ref) { + return switch (ref.block().elementType()) { + case BOOLEAN -> new MultivalueDedupeBoolean(ref).dedupeToBlock(); + case BYTES_REF -> new MultivalueDedupeBytesRef(ref).dedupeToBlockUsingCopyMissing(); + case INT -> new MultivalueDedupeInt(ref).dedupeToBlockUsingCopyMissing(); + case LONG -> new MultivalueDedupeLong(ref).dedupeToBlockUsingCopyMissing(); + case DOUBLE -> new MultivalueDedupeDouble(ref).dedupeToBlockUsingCopyMissing(); default -> throw new IllegalArgumentException(); }; } @@ -61,13 +61,13 @@ public static Block dedupeToBlockUsingCopyMissing(Block block) { * performance is dominated by the {@code n*log n} sort. Prefer * {@link #dedupeToBlockAdaptive} unless you need the results sorted. 
*/ - public static Block dedupeToBlockUsingCopyAndSort(Block block) { - return switch (block.elementType()) { - case BOOLEAN -> new MultivalueDedupeBoolean((BooleanBlock) block).dedupeToBlock(); - case BYTES_REF -> new MultivalueDedupeBytesRef((BytesRefBlock) block).dedupeToBlockUsingCopyAndSort(); - case INT -> new MultivalueDedupeInt((IntBlock) block).dedupeToBlockUsingCopyAndSort(); - case LONG -> new MultivalueDedupeLong((LongBlock) block).dedupeToBlockUsingCopyAndSort(); - case DOUBLE -> new MultivalueDedupeDouble((DoubleBlock) block).dedupeToBlockUsingCopyAndSort(); + public static Block.Ref dedupeToBlockUsingCopyAndSort(Block.Ref ref) { + return switch (ref.block().elementType()) { + case BOOLEAN -> new MultivalueDedupeBoolean(ref).dedupeToBlock(); + case BYTES_REF -> new MultivalueDedupeBytesRef(ref).dedupeToBlockUsingCopyAndSort(); + case INT -> new MultivalueDedupeInt(ref).dedupeToBlockUsingCopyAndSort(); + case LONG -> new MultivalueDedupeLong(ref).dedupeToBlockUsingCopyAndSort(); + case DOUBLE -> new MultivalueDedupeDouble(ref).dedupeToBlockUsingCopyAndSort(); default -> throw new IllegalArgumentException(); }; } @@ -80,37 +80,37 @@ public static ExpressionEvaluator.Factory evaluator(ElementType elementType, Exp return switch (elementType) { case BOOLEAN -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override - public Block eval(Page page) { - return new MultivalueDedupeBoolean((BooleanBlock) field.eval(page)).dedupeToBlock(); + public Block.Ref eval(Page page) { + return new MultivalueDedupeBoolean(field.eval(page)).dedupeToBlock(); } }; case BYTES_REF -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override - public Block eval(Page page) { - return new MultivalueDedupeBytesRef((BytesRefBlock) field.eval(page)).dedupeToBlockAdaptive(); + public Block.Ref eval(Page page) { + return new MultivalueDedupeBytesRef(field.eval(page)).dedupeToBlockAdaptive(); } }; case INT -> dvrCtx -> new 
MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override - public Block eval(Page page) { - return new MultivalueDedupeInt((IntBlock) field.eval(page)).dedupeToBlockAdaptive(); + public Block.Ref eval(Page page) { + return new MultivalueDedupeInt(field.eval(page)).dedupeToBlockAdaptive(); } }; case LONG -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override - public Block eval(Page page) { - return new MultivalueDedupeLong((LongBlock) field.eval(page)).dedupeToBlockAdaptive(); + public Block.Ref eval(Page page) { + return new MultivalueDedupeLong(field.eval(page)).dedupeToBlockAdaptive(); } }; case DOUBLE -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override - public Block eval(Page page) { - return new MultivalueDedupeDouble((DoubleBlock) field.eval(page)).dedupeToBlockAdaptive(); + public Block.Ref eval(Page page) { + return new MultivalueDedupeDouble(field.eval(page)).dedupeToBlockAdaptive(); } }; case NULL -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override - public Block eval(Page page) { + public Block.Ref eval(Page page) { return field.eval(page); // The page is all nulls and when you dedupe that it's still all nulls } }; @@ -128,17 +128,28 @@ public record HashResult(IntBlock ords, boolean sawNull) {} * and then encodes the results into a {@link byte[]} which can be used for * things like hashing many fields together. */ - public static BatchEncoder batchEncoder(Block block, int batchSize) { - // TODO collect single-valued block handling here. And maybe vector. And maybe all null? - // TODO check for for unique multivalued fields and for ascending multivalue fields. 
- return switch (block.elementType()) { - case BOOLEAN -> new MultivalueDedupeBoolean((BooleanBlock) block).batchEncoder(batchSize); - case BYTES_REF -> new MultivalueDedupeBytesRef((BytesRefBlock) block).batchEncoder(batchSize); - case INT -> new MultivalueDedupeInt((IntBlock) block).batchEncoder(batchSize); - case LONG -> new MultivalueDedupeLong((LongBlock) block).batchEncoder(batchSize); - case DOUBLE -> new MultivalueDedupeDouble((DoubleBlock) block).batchEncoder(batchSize); - default -> throw new IllegalArgumentException(); - }; + public static BatchEncoder batchEncoder(Block.Ref ref, int batchSize, boolean allowDirectEncoder) { + var elementType = ref.block().elementType(); + if (allowDirectEncoder && ref.block().mvDeduplicated()) { + var block = ref.block(); + return switch (elementType) { + case BOOLEAN -> new BatchEncoder.DirectBooleans((BooleanBlock) block); + case BYTES_REF -> new BatchEncoder.DirectBytesRefs((BytesRefBlock) block); + case INT -> new BatchEncoder.DirectInts((IntBlock) block); + case LONG -> new BatchEncoder.DirectLongs((LongBlock) block); + case DOUBLE -> new BatchEncoder.DirectDoubles((DoubleBlock) block); + default -> throw new IllegalArgumentException("Unknown [" + elementType + "]"); + }; + } else { + return switch (elementType) { + case BOOLEAN -> new MultivalueDedupeBoolean(ref).batchEncoder(batchSize); + case BYTES_REF -> new MultivalueDedupeBytesRef(ref).batchEncoder(batchSize); + case INT -> new MultivalueDedupeInt(ref).batchEncoder(batchSize); + case LONG -> new MultivalueDedupeLong(ref).batchEncoder(batchSize); + case DOUBLE -> new MultivalueDedupeDouble(ref).batchEncoder(batchSize); + default -> throw new IllegalArgumentException(); + }; + } } private abstract static class MvDedupeEvaluator implements EvalOperator.ExpressionEvaluator { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java index 39f0bbedd6732..b26e243947a5b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupeBoolean.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; @@ -29,20 +30,22 @@ public class MultivalueDedupeBoolean { */ public static final int TRUE_ORD = 2; + private final Block.Ref ref; private final BooleanBlock block; private boolean seenTrue; private boolean seenFalse; - public MultivalueDedupeBoolean(BooleanBlock block) { - this.block = block; + public MultivalueDedupeBoolean(Block.Ref ref) { + this.ref = ref; + this.block = (BooleanBlock) ref.block(); } /** * Dedupe values using an adaptive algorithm based on the size of the input list. 
*/ - public BooleanBlock dedupeToBlock() { + public Block.Ref dedupeToBlock() { if (false == block.mayHaveMultivaluedFields()) { - return block; + return ref; } BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -57,7 +60,7 @@ public BooleanBlock dedupeToBlock() { } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index 996561121df8f..8f320815f995e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -27,7 +27,6 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.BlockOrdinalsReader; import org.elasticsearch.compute.lucene.ValueSourceInfo; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.HashAggregationOperator.GroupSpec; @@ -234,18 +233,31 @@ protected boolean lessThan(AggregatedResultIterator a, AggregatedResultIterator }; final List aggregators = createGroupingAggregators(); try { + boolean seenNulls = false; + for (OrdinalSegmentAggregator agg : ordinalAggregators.values()) { + if (agg.seenNulls()) { + seenNulls = true; + for (int i = 0; i < aggregators.size(); i++) { + aggregators.get(i).addIntermediateRow(0, agg.aggregators.get(i), 0); + } + } + } for (OrdinalSegmentAggregator agg : ordinalAggregators.values()) { final AggregatedResultIterator it = agg.getResultIterator(); if (it.next()) { pq.add(it); } } - int position = -1; + final int 
startPosition = seenNulls ? 0 : -1; + int position = startPosition; final BytesRefBuilder lastTerm = new BytesRefBuilder(); var blockBuilder = BytesRefBlock.newBlockBuilder(1); + if (seenNulls) { + blockBuilder.appendNull(); + } while (pq.size() > 0) { final AggregatedResultIterator top = pq.top(); - if (position == -1 || lastTerm.get().equals(top.currentTerm) == false) { + if (position == startPosition || lastTerm.get().equals(top.currentTerm) == false) { position++; lastTerm.copyBytes(top.currentTerm); blockBuilder.appendBytesRef(top.currentTerm); @@ -338,11 +350,8 @@ void addInput(IntVector docs, Page page) { if (BlockOrdinalsReader.canReuse(currentReader, docs.getInt(0)) == false) { currentReader = new BlockOrdinalsReader(withOrdinals.ordinalsValues(leafReaderContext)); } - final IntBlock ordinals = currentReader.readOrdinals(docs); + final IntBlock ordinals = currentReader.readOrdinalsAdded1(docs); for (int p = 0; p < ordinals.getPositionCount(); p++) { - if (ordinals.isNull(p)) { - continue; - } int start = ordinals.getFirstValueIndex(p); int end = start + ordinals.getValueCount(p); for (int i = start; i < end; i++) { @@ -350,8 +359,8 @@ void addInput(IntVector docs, Page page) { visitedOrds.set(ord); } } - for (GroupingAggregator aggregator : aggregators) { - aggregator.prepareProcessPage(this, page).add(0, ordinals); + for (GroupingAggregatorFunction.AddInput addInput : prepared) { + addInput.add(0, ordinals); } } catch (IOException e) { throw new UncheckedIOException(e); @@ -362,6 +371,10 @@ AggregatedResultIterator getResultIterator() throws IOException { return new AggregatedResultIterator(aggregators, visitedOrds, withOrdinals.ordinalsValues(leafReaderContext)); } + boolean seenNulls() { + return visitedOrds.get(0); + } + @Override public BitArray seenGroupIds(BigArrays bigArrays) { BitArray seen = new BitArray(0, bigArrays); @@ -377,7 +390,7 @@ public void close() { private static class AggregatedResultIterator { private BytesRef currentTerm; - private 
long currentOrd = -1; + private long currentOrd = 0; private final List aggregators; private final BitArray ords; private final SortedSetDocValues dv; @@ -395,8 +408,9 @@ int currentPosition() { boolean next() throws IOException { currentOrd = ords.nextSetBit(currentOrd + 1); + assert currentOrd > 0 : currentOrd; if (currentOrd < Long.MAX_VALUE) { - currentTerm = dv.lookupOrd(currentOrd); + currentTerm = dv.lookupOrd(currentOrd - 1); return true; } else { currentTerm = null; @@ -448,4 +462,49 @@ public void close() { Releasables.close(extractor, aggregator); } } + + static final class BlockOrdinalsReader { + private final SortedSetDocValues sortedSetDocValues; + private final Thread creationThread; + + BlockOrdinalsReader(SortedSetDocValues sortedSetDocValues) { + this.sortedSetDocValues = sortedSetDocValues; + this.creationThread = Thread.currentThread(); + } + + IntBlock readOrdinalsAdded1(IntVector docs) throws IOException { + final int positionCount = docs.getPositionCount(); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + for (int p = 0; p < positionCount; p++) { + int doc = docs.getInt(p); + if (false == sortedSetDocValues.advanceExact(doc)) { + builder.appendInt(0); + continue; + } + int count = sortedSetDocValues.docValueCount(); + // TODO don't come this way if there are a zillion ords on the field + if (count == 1) { + builder.appendInt(Math.toIntExact(sortedSetDocValues.nextOrd() + 1)); + continue; + } + builder.beginPositionEntry(); + for (int i = 0; i < count; i++) { + builder.appendInt(Math.toIntExact(sortedSetDocValues.nextOrd() + 1)); + } + builder.endPositionEntry(); + } + return builder.build(); + } + + int docID() { + return sortedSetDocValues.docID(); + } + + /** + * Checks if the reader can be used to read a range of documents starting with the given docID by the current thread.
+ */ + static boolean canReuse(BlockOrdinalsReader reader, int startingDocID) { + return reader != null && reader.creationThread == Thread.currentThread() && reader.docID() <= startingDocID; + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java index 59772cfa9f33d..21375b72ac6f6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java @@ -64,63 +64,65 @@ protected Page process(Page page) { blockBuilders[i] = BytesRefBlock.newBlockBuilder(rowsCount); } - BytesRefBlock input = (BytesRefBlock) inputEvaluator.eval(page); - BytesRef spare = new BytesRef(); - for (int row = 0; row < rowsCount; row++) { - if (input.isNull(row)) { - for (int i = 0; i < fieldNames.length; i++) { - blockBuilders[i].appendNull(); - } - continue; - } - - int position = input.getFirstValueIndex(row); - int valueCount = input.getValueCount(row); - if (valueCount == 1) { - Map items = parser.apply(input.getBytesRef(position, spare).utf8ToString()); - if (items == null) { + try (Block.Ref ref = inputEvaluator.eval(page)) { + BytesRefBlock input = (BytesRefBlock) ref.block(); + BytesRef spare = new BytesRef(); + for (int row = 0; row < rowsCount; row++) { + if (input.isNull(row)) { for (int i = 0; i < fieldNames.length; i++) { blockBuilders[i].appendNull(); } continue; } - for (int i = 0; i < fieldNames.length; i++) { - String val = items.get(fieldNames[i]); - BlockUtils.appendValue(blockBuilders[i], val, ElementType.BYTES_REF); - } - } else { - // multi-valued input - String[] firstValues = new String[fieldNames.length]; - boolean[] positionEntryOpen = new boolean[fieldNames.length]; - for (int c = 0; c < valueCount; c++) { - Map items = 
parser.apply(input.getBytesRef(position + c, spare).utf8ToString()); + + int position = input.getFirstValueIndex(row); + int valueCount = input.getValueCount(row); + if (valueCount == 1) { + Map items = parser.apply(input.getBytesRef(position, spare).utf8ToString()); if (items == null) { + for (int i = 0; i < fieldNames.length; i++) { + blockBuilders[i].appendNull(); + } continue; } for (int i = 0; i < fieldNames.length; i++) { String val = items.get(fieldNames[i]); - if (val == null) { + BlockUtils.appendValue(blockBuilders[i], val, ElementType.BYTES_REF); + } + } else { + // multi-valued input + String[] firstValues = new String[fieldNames.length]; + boolean[] positionEntryOpen = new boolean[fieldNames.length]; + for (int c = 0; c < valueCount; c++) { + Map items = parser.apply(input.getBytesRef(position + c, spare).utf8ToString()); + if (items == null) { continue; } - if (firstValues[i] == null) { - firstValues[i] = val; - } else { - if (positionEntryOpen[i] == false) { - positionEntryOpen[i] = true; - blockBuilders[i].beginPositionEntry(); - BlockUtils.appendValue(blockBuilders[i], firstValues[i], ElementType.BYTES_REF); + for (int i = 0; i < fieldNames.length; i++) { + String val = items.get(fieldNames[i]); + if (val == null) { + continue; + } + if (firstValues[i] == null) { + firstValues[i] = val; + } else { + if (positionEntryOpen[i] == false) { + positionEntryOpen[i] = true; + blockBuilders[i].beginPositionEntry(); + BlockUtils.appendValue(blockBuilders[i], firstValues[i], ElementType.BYTES_REF); + } + BlockUtils.appendValue(blockBuilders[i], val, ElementType.BYTES_REF); } - BlockUtils.appendValue(blockBuilders[i], val, ElementType.BYTES_REF); } } - } - for (int i = 0; i < fieldNames.length; i++) { - if (positionEntryOpen[i]) { - blockBuilders[i].endPositionEntry(); - } else if (firstValues[i] == null) { - blockBuilders[i].appendNull(); - } else { - BlockUtils.appendValue(blockBuilders[i], firstValues[i], ElementType.BYTES_REF); + for (int i = 0; i < 
fieldNames.length; i++) { + if (positionEntryOpen[i]) { + blockBuilders[i].endPositionEntry(); + } else if (firstValues[i] == null) { + blockBuilders[i].appendNull(); + } else { + BlockUtils.appendValue(blockBuilders[i], firstValues[i], ElementType.BYTES_REF); + } } } } @@ -140,6 +142,6 @@ public String toString() { @Override public void close() { - Releasables.closeExpectNoException(inputEvaluator); + Releasables.closeExpectNoException(inputEvaluator, super::close); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st index 337b095ebe8d0..3d7a21c0f12df 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st @@ -53,12 +53,14 @@ $elseif(long)$ private static final int ALWAYS_COPY_MISSING = 300; $endif$ + private final Block.Ref ref; private final $Type$Block block; private $type$[] work = new $type$[ArrayUtil.oversize(2, $BYTES$)]; private int w; - public MultivalueDedupe$Type$($Type$Block block) { - this.block = block; + public MultivalueDedupe$Type$(Block.Ref ref) { + this.ref = ref; + this.block = ($Type$Block) ref.block(); $if(BytesRef)$ // TODO very large numbers might want a hash based implementation - and for BytesRef that might not be that big fillWork(0, work.length); @@ -69,9 +71,9 @@ $endif$ * Remove duplicate values from each position and write the results to a * {@link Block} using an adaptive algorithm based on the size of the input list. 
*/ - public $Type$Block dedupeToBlockAdaptive() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockAdaptive() { + if (block.mvDeduplicated()) { + return ref; } $Type$Block.Builder builder = $Type$Block.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -112,7 +114,7 @@ $endif$ } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -121,9 +123,9 @@ $endif$ * case complexity for larger. Prefer {@link #dedupeToBlockAdaptive} * which picks based on the number of elements at each position. */ - public $Type$Block dedupeToBlockUsingCopyAndSort() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyAndSort() { + if (block.mvDeduplicated()) { + return ref; } $Type$Block.Builder builder = $Type$Block.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -142,7 +144,7 @@ $endif$ } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** @@ -153,9 +155,9 @@ $endif$ * performance is dominated by the {@code n*log n} sort. Prefer * {@link #dedupeToBlockAdaptive} unless you need the results sorted. 
*/ - public $Type$Block dedupeToBlockUsingCopyMissing() { - if (false == block.mayHaveMultivaluedFields()) { - return block; + public Block.Ref dedupeToBlockUsingCopyMissing() { + if (block.mvDeduplicated()) { + return ref; } $Type$Block.Builder builder = $Type$Block.newBlockBuilder(block.getPositionCount()); for (int p = 0; p < block.getPositionCount(); p++) { @@ -174,7 +176,7 @@ $endif$ } } } - return builder.build(); + return Block.Ref.floating(builder.build()); } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java index d52f25e9d8306..0fb6ec6f63d96 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java @@ -36,10 +36,6 @@ public record ExchangeSinkOperatorFactory(Supplier exchangeSinks, implements SinkOperatorFactory { - public ExchangeSinkOperatorFactory(Supplier exchangeSinks) { - this(exchangeSinks, Function.identity()); - } - @Override public SinkOperator get(DriverContext driverContext) { return new ExchangeSinkOperator(exchangeSinks.get(), transformer); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java index b8a41a3ee343d..bd2027cade78f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java @@ -9,12 +9,14 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import 
org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.core.Releasable; /** * Builds {@link Block}s from keys and values encoded into {@link BytesRef}s. */ -interface ResultBuilder { +interface ResultBuilder extends Releasable { /** * Called for each sort key before {@link #decodeValue} to consume the sort key and * store the value of the key for {@link #decodeValue} can use it to reconstruct @@ -36,15 +38,21 @@ interface ResultBuilder { */ Block build(); - static ResultBuilder resultBuilderFor(ElementType elementType, TopNEncoder encoder, boolean inKey, int positions) { + static ResultBuilder resultBuilderFor( + BlockFactory blockFactory, + ElementType elementType, + TopNEncoder encoder, + boolean inKey, + int positions + ) { return switch (elementType) { - case BOOLEAN -> new ResultBuilderForBoolean(encoder, inKey, positions); - case BYTES_REF -> new ResultBuilderForBytesRef(encoder, inKey, positions); - case INT -> new ResultBuilderForInt(encoder, inKey, positions); - case LONG -> new ResultBuilderForLong(encoder, inKey, positions); - case DOUBLE -> new ResultBuilderForDouble(encoder, inKey, positions); - case NULL -> new ResultBuilderForNull(); - case DOC -> new ResultBuilderForDoc(positions); + case BOOLEAN -> new ResultBuilderForBoolean(blockFactory, encoder, inKey, positions); + case BYTES_REF -> new ResultBuilderForBytesRef(blockFactory, encoder, inKey, positions); + case INT -> new ResultBuilderForInt(blockFactory, encoder, inKey, positions); + case LONG -> new ResultBuilderForLong(blockFactory, encoder, inKey, positions); + case DOUBLE -> new ResultBuilderForDouble(blockFactory, encoder, inKey, positions); + case NULL -> new ResultBuilderForNull(blockFactory); + case DOC -> new ResultBuilderForDoc(blockFactory, positions); default -> { assert false : "Result builder for [" + elementType + "]"; throw new UnsupportedOperationException("Result builder for [" + elementType + "]"); diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java index 166d5be83b474..7fb507ffdbead 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java @@ -13,12 +13,15 @@ import org.elasticsearch.compute.data.DocVector; class ResultBuilderForDoc implements ResultBuilder { + private final BlockFactory blockFactory; private final int[] shards; private final int[] segments; private final int[] docs; private int position; - ResultBuilderForDoc(int positions) { + ResultBuilderForDoc(BlockFactory blockFactory, int positions) { + // TODO use fixed length builders + this.blockFactory = blockFactory; this.shards = new int[positions]; this.segments = new int[positions]; this.docs = new int[positions]; @@ -40,9 +43,9 @@ public void decodeValue(BytesRef values) { @Override public Block build() { return new DocVector( - BlockFactory.getNonBreakingInstance().newIntArrayVector(shards, position), - BlockFactory.getNonBreakingInstance().newIntArrayVector(segments, position), - BlockFactory.getNonBreakingInstance().newIntArrayVector(docs, position), + blockFactory.newIntArrayVector(shards, position), + blockFactory.newIntArrayVector(segments, position), + blockFactory.newIntArrayVector(docs, position), null ).asBlock(); } @@ -51,4 +54,9 @@ public Block build() { public String toString() { return "ValueExtractorForDoc"; } + + @Override + public void close() { + // TODO memory accounting + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java index 05b9ba2a07658..a45f16fc30910 100644 
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java @@ -9,10 +9,16 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; public class ResultBuilderForNull implements ResultBuilder { + private final BlockFactory blockFactory; private int positions; + public ResultBuilderForNull(BlockFactory blockFactory) { + this.blockFactory = blockFactory; + } + @Override public void decodeKey(BytesRef keys) { throw new AssertionError("somehow got a value for a null key"); @@ -29,11 +35,16 @@ public void decodeValue(BytesRef values) { @Override public Block build() { - return Block.constantNullBlock(positions); + return Block.constantNullBlock(positions, blockFactory); } @Override public String toString() { return "ValueExtractorForNull"; } + + @Override + public void close() { + // Nothing to close + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java index 86b3a18992db4..9657d60376763 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -12,7 +12,9 @@ import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import 
org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -205,7 +207,15 @@ public record TopNOperatorFactory( @Override public TopNOperator get(DriverContext driverContext) { - return new TopNOperator(driverContext.breaker(), topCount, elementTypes, encoders, sortOrders, maxPageSize); + return new TopNOperator( + driverContext.blockFactory(), + driverContext.breaker(), + topCount, + elementTypes, + encoders, + sortOrders, + maxPageSize + ); } @Override @@ -222,6 +232,7 @@ public String describe() { } } + private final BlockFactory blockFactory; private final CircuitBreaker breaker; private final Queue inputQueue; @@ -231,9 +242,11 @@ public String describe() { private final List encoders; private final List sortOrders; + private Row spare; private Iterator output; public TopNOperator( + BlockFactory blockFactory, CircuitBreaker breaker, int topCount, List elementTypes, @@ -241,6 +254,7 @@ public TopNOperator( List sortOrders, int maxPageSize ) { + this.blockFactory = blockFactory; this.breaker = breaker; this.maxPageSize = maxPageSize; this.elementTypes = elementTypes; @@ -301,21 +315,20 @@ public void addInput(Page page) { * and must be closed. That happens either because it's overflow from the * inputQueue or because we hit an allocation failure while building it. 
*/ - Row row = null; try { for (int i = 0; i < page.getPositionCount(); i++) { - if (row == null) { - row = new Row(breaker); + if (spare == null) { + spare = new Row(breaker); } else { - row.keys.clear(); - row.orderByCompositeKeyAscending.clear(); - row.values.clear(); + spare.keys.clear(); + spare.orderByCompositeKeyAscending.clear(); + spare.values.clear(); } - rowFiller.row(i, row); - row = inputQueue.insertWithOverflow(row); + rowFiller.row(i, spare); + spare = inputQueue.insertWithOverflow(spare); } } finally { - Releasables.close(row); + Releasables.close(() -> page.releaseBlocks()); } } @@ -327,18 +340,24 @@ public void finish() { } private Iterator toPages() { + if (spare != null) { + // Remove the spare, we're never going to use it again. + spare.close(); + spare = null; + } if (inputQueue.size() == 0) { return Collections.emptyIterator(); } List list = new ArrayList<>(inputQueue.size()); + List result = new ArrayList<>(); + ResultBuilder[] builders = null; + boolean success = false; try { while (inputQueue.size() > 0) { list.add(inputQueue.pop()); } Collections.reverse(list); - List result = new ArrayList<>(); - ResultBuilder[] builders = null; int p = 0; int size = 0; for (int i = 0; i < list.size(); i++) { @@ -347,6 +366,7 @@ private Iterator toPages() { builders = new ResultBuilder[elementTypes.size()]; for (int b = 0; b < builders.length; b++) { builders[b] = ResultBuilder.resultBuilderFor( + blockFactory, elementTypes.get(b), encoders.get(b).toUnsortable(), channelInKey(sortOrders, b), @@ -386,14 +406,22 @@ private Iterator toPages() { p++; if (p == size) { result.add(new Page(Arrays.stream(builders).map(ResultBuilder::build).toArray(Block[]::new))); + Releasables.closeExpectNoException(builders); builders = null; } - } assert builders == null; + success = true; return result.iterator(); } finally { - Releasables.closeExpectNoException(() -> Releasables.close(list)); + if (success == false) { + List close = new ArrayList<>(list); + for (Page p : 
result) { + close.add(p::releaseBlocks); + } + Collections.addAll(close, builders); + Releasables.closeExpectNoException(Releasables.wrap(close)); + } } } @@ -422,10 +450,15 @@ public Page getOutput() { @Override public void close() { /* - * If everything went well we'll have drained inputQueue to this'll - * be a noop. But if inputQueue + * If we close before calling finish then spare and inputQueue will be live rows + * that need closing. If we close after calling finish then the output iterator + * will contain pages of results that have yet to be returned. */ - Releasables.closeExpectNoException(() -> Releasables.close(inputQueue)); + Releasables.closeExpectNoException( + spare, + inputQueue == null ? null : Releasables.wrap(inputQueue), + output == null ? null : Releasables.wrap(() -> Iterators.map(output, p -> p::releaseBlocks)) + ); } private static long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(TopNOperator.class) + RamUsageEstimator diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st index 9ec03270da093..dbe0b23af93bb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st @@ -10,7 +10,6 @@ package org.elasticsearch.compute.operator.topn; $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.$Type$Block; import org.elasticsearch.compute.data.$Type$Vector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -22,11 +21,11 @@ abstract class KeyExtractorFor$Type$ implements KeyExtractor { return new KeyExtractorFor$Type$.ForVector(encoder, nul, nonNul, v); } if (ascending) { - return block.mvOrdering() 
== Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorFor$Type$.MinForAscending(encoder, nul, nonNul, block) : new KeyExtractorFor$Type$.MinForUnordered(encoder, nul, nonNul, block); } - return block.mvOrdering() == Block.MvOrdering.ASCENDING + return block.mvSortedAscending() ? new KeyExtractorFor$Type$.MaxForAscending(encoder, nul, nonNul, block) : new KeyExtractorFor$Type$.MaxForUnordered(encoder, nul, nonNul, block); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st index 5f9a35bd0ebd3..ebe62398c8504 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.$Type$Block; class ResultBuilderFor$Type$ implements ResultBuilder { @@ -26,14 +27,14 @@ $endif$ */ private $type$ key; - ResultBuilderFor$Type$(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderFor$Type$(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { $if(BytesRef)$ this.encoder = encoder; $else$ assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); $endif$ this.inKey = inKey; - this.builder = $Type$Block.newBlockBuilder(initialSize); + this.builder = $Type$Block.newBlockBuilder(initialSize, blockFactory); } @Override @@ -81,4 +82,9 @@ $endif$ public String toString() { return "ResultBuilderFor$Type$[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 04a966b399870..bdf696f460060 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -287,7 +287,7 @@ public void testLimitOperator() { try ( var driver = new Driver( driverContext, - new SequenceLongBlockSourceOperator(values, 100), + new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of((new LimitOperator.Factory(limit)).get(driverContext)), new PageConsumerOperator(page -> { LongBlock block = page.getBlock(0); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java deleted file mode 100644 index 8fa38b6864674..0000000000000 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute; - -import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.BlockFactoryParameters; -import org.elasticsearch.indices.breaker.CircuitBreakerService; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class TestBlockFactoryParameters implements BlockFactoryParameters { - - final CircuitBreaker breaker; - final BigArrays bigArrays; - - public TestBlockFactoryParameters() { - breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); - var breakerService = mock(CircuitBreakerService.class); - when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(breaker); - bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, breakerService); - } - - @Override - public CircuitBreaker breaker() { - return breaker; - } - - @Override - public BigArrays bigArrays() { - return bigArrays; - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index a4b6c8b965962..22325039af124 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -91,8 +91,8 @@ protected final ByteSizeValue smallEnoughToCircuitBreak() { public final void testIgnoresNulls() { int end = between(1_000, 100_000); List results = new ArrayList<>(); - List input = CannedSourceOperator.collectPages(simpleInput(end)); DriverContext driverContext = driverContext(); + List 
input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), end)); try ( Driver d = new Driver( @@ -111,7 +111,9 @@ public final void testIgnoresNulls() { public final void testMultivalued() { int end = between(1_000, 100_000); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(new PositionMergingSourceOperator(simpleInput(end))); + List input = CannedSourceOperator.collectPages( + new PositionMergingSourceOperator(simpleInput(driverContext.blockFactory(), end)) + ); assertSimpleOutput(input, drive(simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext), input.iterator())); } @@ -119,7 +121,7 @@ public final void testMultivaluedWithNulls() { int end = between(1_000, 100_000); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages( - new NullInsertingSourceOperator(new PositionMergingSourceOperator(simpleInput(end))) + new NullInsertingSourceOperator(new PositionMergingSourceOperator(simpleInput(driverContext.blockFactory(), end))) ); assertSimpleOutput(input, drive(simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext), input.iterator())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java index 11241020a6709..623de7fdd1fff 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import 
org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,8 +22,8 @@ public class CountAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLong())); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLong())); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java index 74cd88feed3f4..b44eb0ba1bd3f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceBooleanBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,8 +22,8 @@ public class CountDistinctBooleanAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { - return new SequenceBooleanBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> randomBoolean()).toList()); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceBooleanBlockSourceOperator(blockFactory, 
LongStream.range(0, size).mapToObj(l -> randomBoolean()).toList()); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java index eab1b9cb2d8de..7360b101bf79d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongBooleanTupleBlockSourceOperator; @@ -33,7 +34,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongBooleanTupleBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), randomBoolean())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java index 69ccc0a04c0f9..c495a6b9f196b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java @@ -11,6 +11,7 @@ import 
org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.BytesRefBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -23,7 +24,7 @@ public class CountDistinctBytesRefAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, Math.min(Integer.MAX_VALUE, Integer.MAX_VALUE / size)); return new BytesRefBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> new BytesRef(String.valueOf(between(-max, max)))).toList() diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java index 919d06af430fd..eadbba9f91880 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongBytesRefTupleBlockSourceOperator; @@ -35,7 +36,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator 
simpleInput(BlockFactory blockFactory, int size) { return new LongBytesRefTupleBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), new BytesRef(String.valueOf(between(1, 10000))))) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java index c0678441cdc74..ccfe7b426ebca 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -23,7 +24,7 @@ public class CountDistinctDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java index 5a928f12d33b7..0c4d89da09b99 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -34,7 +35,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), randomDoubleBetween(0, 100, true))) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index 3699a87431937..b67e4cdee7e97 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -29,7 +29,7 @@ public class CountDistinctIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, Math.min(Integer.MAX_VALUE, Integer.MAX_VALUE / size)); return new SequenceIntBlockSourceOperator(LongStream.range(0, size).mapToInt(l -> between(-max, max))); } diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java index f2a46e9f4c3af..678024c19d391 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -34,7 +35,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongIntBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), between(0, 10000)))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 556f9d0ccc462..704b5c649f744 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -30,9 +30,9 @@ public class CountDistinctLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + 
protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java index a5959471b8e15..4282adaba595e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -33,8 +34,9 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), randomLongBetween(0, 100_000))) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java index 54a35fcc19cb2..945c68711bb4e 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -33,9 +34,10 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { if (randomBoolean()) { return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index eab6eb30261bd..4ae58fd8c6333 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -147,7 +147,9 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { public final void testNullGroupsAndValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(new NullInsertingSourceOperator(simpleInput(end))); + List input = CannedSourceOperator.collectPages( + new NullInsertingSourceOperator(simpleInput(driverContext.blockFactory(), end)) + ); List results = 
drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -155,7 +157,7 @@ public final void testNullGroupsAndValues() { public final void testNullGroups() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullGroups(simpleInput(end))); + List input = CannedSourceOperator.collectPages(nullGroups(simpleInput(driverContext.blockFactory(), end))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -184,7 +186,7 @@ protected void appendNull(ElementType elementType, Block.Builder builder, int bl public final void testNullValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullValues(simpleInput(end))); + List input = CannedSourceOperator.collectPages(nullValues(simpleInput(driverContext.blockFactory(), end))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -192,7 +194,7 @@ public final void testNullValues() { public final void testNullValuesInitialIntermediateFinal() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullValues(simpleInput(end))); + List input = CannedSourceOperator.collectPages(nullValues(simpleInput(driverContext.blockFactory(), end))); List results = drive( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), @@ -220,7 +222,7 @@ protected void appendNull(ElementType elementType, Block.Builder builder, int bl public final void testMultivalued() { DriverContext driverContext = driverContext(); int end = between(1_000, 100_000); - List input = 
CannedSourceOperator.collectPages(mergeValues(simpleInput(end))); + List input = CannedSourceOperator.collectPages(mergeValues(simpleInput(driverContext.blockFactory(), end))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -228,7 +230,9 @@ public final void testMultivalued() { public final void testMulitvaluedNullGroupsAndValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(new NullInsertingSourceOperator(mergeValues(simpleInput(end)))); + List input = CannedSourceOperator.collectPages( + new NullInsertingSourceOperator(mergeValues(simpleInput(driverContext.blockFactory(), end))) + ); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -236,7 +240,7 @@ public final void testMulitvaluedNullGroupsAndValues() { public final void testMulitvaluedNullGroup() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullGroups(mergeValues(simpleInput(end)))); + List input = CannedSourceOperator.collectPages(nullGroups(mergeValues(simpleInput(driverContext.blockFactory(), end)))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -244,7 +248,7 @@ public final void testMulitvaluedNullGroup() { public final void testMulitvaluedNullValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullValues(mergeValues(simpleInput(end)))); + List input = CannedSourceOperator.collectPages(nullValues(mergeValues(simpleInput(driverContext.blockFactory(), end)))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), 
input.iterator()); assertSimpleOutput(input, results); } @@ -295,12 +299,13 @@ private void assertNullOnly(List operators) { public final void testNullSome() { DriverContext driverContext = driverContext(); - assertNullSome(List.of(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext))); + assertNullSome(driverContext, List.of(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext))); } public final void testNullSomeInitialFinal() { DriverContext driverContext = driverContext(); assertNullSome( + driverContext, List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.FINAL).get(driverContext) @@ -311,6 +316,7 @@ public final void testNullSomeInitialFinal() { public final void testNullSomeInitialIntermediateFinal() { DriverContext driverContext = driverContext(); assertNullSome( + driverContext, List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INTERMEDIATE).get(driverContext), @@ -322,8 +328,8 @@ public final void testNullSomeInitialIntermediateFinal() { /** * Run the agg on some data where one group is always null. 
*/ - private void assertNullSome(List operators) { - List inputData = CannedSourceOperator.collectPages(simpleInput(1000)); + private void assertNullSome(DriverContext driverContext, List operators) { + List inputData = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 1000)); SeenGroups seenGroups = seenGroups(inputData); long nullGroup = randomFrom(seenGroups.nonNull); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java index b67220b4909b7..cfda483d029f6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,7 +22,7 @@ public class MaxDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java index 3750aec95f3a7..9a2c8bc17685d 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -24,7 +25,7 @@ public class MaxDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java index 72cfa06222b50..e76021b883120 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,7 +21,7 @@ public class MaxIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator 
simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceIntBlockSourceOperator(IntStream.range(0, size).map(l -> randomInt())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java index 9ffee498eeba2..313e10be39855 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -33,7 +34,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongIntBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomInt()))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java index 4e84f2e672b97..a51aa98f7a5a8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import 
org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,9 +21,9 @@ public class MaxLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java index e284f2a6103d1..a1f44e128c2e1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -33,8 +34,11 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { - return new TupleBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), 
randomLong()))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java index 74bda421a545e..1c14a8e7855ce 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -22,7 +23,7 @@ public class MedianAbsoluteDeviationDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List values = Arrays.asList(1.2, 1.25, 2.0, 2.0, 4.3, 6.0, 9.0); Randomness.shuffle(values); return new SequenceDoubleBlockSourceOperator(values); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java index 
6751486453f30..06ddb2a734f8c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -27,7 +28,7 @@ public class MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { double[][] samples = new double[][] { { 1.2, 1.25, 2.0, 2.0, 4.3, 6.0, 9.0 }, { 0.1, 1.5, 2.0, 3.0, 4.0, 7.5, 100.0 }, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java index 20506cc5c8f93..40e422b6efc26 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import 
org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -22,7 +23,7 @@ public class MedianAbsoluteDeviationIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List values = Arrays.asList(12, 125, 20, 20, 43, 60, 90); Randomness.shuffle(values); return new SequenceIntBlockSourceOperator(values); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java index 20f62c67a16cc..2f00764f6fe51 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -27,7 +28,7 @@ public class MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { int[][] samples = new int[][] { { 12, 125, 20, 20, 43, 60, 90 }, { 1, 15, 20, 30, 40, 75, 1000 }, diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java index d80415f83daa2..465bb5800bbb6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -22,10 +23,10 @@ public class MedianAbsoluteDeviationLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List values = Arrays.asList(12L, 125L, 20L, 20L, 43L, 60L, 90L); Randomness.shuffle(values); - return new SequenceLongBlockSourceOperator(values); + return new SequenceLongBlockSourceOperator(blockFactory, values); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java index c3cebad8e0e0b..2c6bfc1204591 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -27,7 +28,7 @@ public class MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { long[][] samples = new long[][] { { 12, 125, 20, 20, 43, 60, 90 }, { 1, 15, 20, 30, 40, 75, 1000 }, @@ -42,7 +43,7 @@ protected SourceOperator simpleInput(int end) { values.add(Tuple.tuple((long) i, v)); } } - return new TupleBlockSourceOperator(values); + return new TupleBlockSourceOperator(blockFactory, values); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java index 622302d549fd0..7e0b7241cf258 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,7 
+22,7 @@ public class MinDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java index 12c63e354547a..7c4141f4a7ad1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -23,7 +24,7 @@ public class MinDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java index 2dc0e893875ab..dc1ab1398fb90 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,7 +21,7 @@ public class MinIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceIntBlockSourceOperator(IntStream.range(0, size).map(l -> randomInt())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java index 4ffbe9b1396d3..55cfc2d124e5f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -33,7 +34,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) 
{ return new LongIntBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomInt()))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java index 25a420237893e..91feb141ac74b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,9 +21,9 @@ public class MinLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java index 311e7e41ed9ac..02dda3fe3c236 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -33,8 +34,11 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { - return new TupleBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong()))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java index 96e61d4782022..61f26cd0209b3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -41,7 +42,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + 
protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java index c0d6595e088eb..9495e78ec47ca 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -42,7 +43,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java index c34a01e608d1a..37d153f7bcae6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -40,7 +41,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new SequenceIntBlockSourceOperator(LongStream.range(0, size).mapToInt(l -> between(0, max))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java index a018fba96e897..948e156e52c85 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -42,7 +43,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = 
between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new LongIntBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), between(-1, max))) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java index cf0b18840d91e..eb32dac18ea80 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -40,9 +41,9 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, 1_000_000); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(0, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(0, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java index 609526532b72e..6360be8595ff8 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -42,9 +43,10 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size / 5); return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLongBetween(-0, max))) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index 767f9a2d5c25b..d3dc262419008 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; @@ -28,7 +29,7 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected 
SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java index 03a7269b84690..8b86d99653282 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -23,7 +24,7 @@ public class SumDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index e6fccf2d46f61..736386fae3dec 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -27,7 +27,7 @@ public class SumIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new SequenceIntBlockSourceOperator(LongStream.range(0, size).mapToInt(l -> between(-max, max))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java index 71666024c819d..0b8678a0e3f05 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -32,7 +33,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new LongIntBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), between(-max, max))) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index ae5aaa5b21965..e9523c5583cd4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -27,9 +27,9 @@ public class SumLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override @@ -53,7 +53,7 @@ public void testOverflowFails() { try ( Driver d = new Driver( driverContext, - new SequenceLongBlockSourceOperator(LongStream.of(Long.MAX_VALUE - 1, 2)), + new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.of(Long.MAX_VALUE - 1, 2)), List.of(simple(nonBreakingBigArrays()).get(driverContext)), new PageConsumerOperator(page -> fail("shouldn't have made it this far")), () -> {} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java index e0dc918b515d6..827dc06a4f542 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; 
import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -32,9 +33,10 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size / 5); return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLongBetween(-max, max))) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index cf7fbbea1c775..f99ded96a9984 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -906,10 +906,12 @@ void assertZeroPositionsAndRelease(Vector vector) { void releaseAndAssertBreaker(Block... 
blocks) { assertThat(breaker.getUsed(), greaterThan(0L)); + Page[] pages = Arrays.stream(blocks).map(Page::new).toArray(Page[]::new); Releasables.closeExpectNoException(blocks); Arrays.stream(blocks).forEach(block -> assertThat(block.isReleased(), is(true))); Arrays.stream(blocks).forEach(BasicBlockTests::assertCannotDoubleRelease); - Arrays.stream(blocks).forEach(BasicBlockTests::assertCannotReadFromPage); + Arrays.stream(pages).forEach(BasicBlockTests::assertCannotReadFromPage); + Arrays.stream(blocks).forEach(BasicBlockTests::assertCannotAddToPage); assertThat(breaker.getUsed(), is(0L)); } @@ -924,12 +926,16 @@ static void assertCannotDoubleRelease(Block block) { assertThat(ex.getMessage(), containsString("can't release already released block")); } - static void assertCannotReadFromPage(Block block) { - Page page = new Page(block); + static void assertCannotReadFromPage(Page page) { var e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); assertThat(e.getMessage(), containsString("can't read released block")); } + static void assertCannotAddToPage(Block block) { + var e = expectThrows(IllegalArgumentException.class, () -> new Page(block)); + assertThat(e.getMessage(), containsString("can't build page out of released blocks but")); + } + static int randomPosition(int positionCount) { return positionCount == 1 ? 
0 : randomIntBetween(0, positionCount - 1); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java index de552d242afa2..3c822da7b5586 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java @@ -7,47 +7,50 @@ package org.elasticsearch.compute.data; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; import java.util.List; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class BlockBuilderTests extends ESTestCase { - - public void testAllNullsInt() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(IntBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(IntBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(IntBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(IntBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); + @ParametersFactory + public static List params() { + List params = new ArrayList<>(); + for (ElementType elementType : ElementType.values()) { + if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + continue; + } + params.add(new Object[] { elementType }); } + return params; } - public void 
testAllNullsLong() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(LongBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(LongBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(LongBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(LongBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); - } - } + private final ElementType elementType; - public void testAllNullsDouble() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(DoubleBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(DoubleBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(DoubleBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(DoubleBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); - } + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + + public BlockBuilderTests(ElementType elementType) { + this.elementType = elementType; } - public void testAllNullsBytesRef() { + public void testAllNulls() { for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(BytesRefBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(BytesRefBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(BytesRefBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(BytesRefBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(0, blockFactory), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(100, blockFactory), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(1000, blockFactory), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(randomIntBetween(0, 100), blockFactory), numEntries); } } @@ -55,14 +58,128 @@ private void testAllNullsImpl(Block.Builder builder, int numEntries) { for (int i = 0; i < numEntries; i++) { builder.appendNull(); } - Block block = builder.build(); - assertThat(block.getPositionCount(), is(numEntries)); - 
assertThat(block.isNull(0), is(true)); - assertThat(block.isNull(numEntries - 1), is(true)); - assertThat(block.isNull(randomPosition(numEntries)), is(true)); + try (Block block = builder.build()) { + assertThat(block.getPositionCount(), is(numEntries)); + assertThat(block.isNull(0), is(true)); + assertThat(block.isNull(numEntries - 1), is(true)); + assertThat(block.isNull(randomPosition(numEntries)), is(true)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } static int randomPosition(int positionCount) { return positionCount == 1 ? 0 : randomIntBetween(0, positionCount - 1); } + + public void testCloseWithoutBuilding() { + elementType.newBlockBuilder(10, blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testBuildSmallSingleValued() { + testBuild(between(1, 100), false, 1); + } + + public void testBuildHugeSingleValued() { + testBuild(between(1_000, 50_000), false, 1); + } + + public void testBuildSmallSingleValuedNullable() { + testBuild(between(1, 100), true, 1); + } + + public void testBuildHugeSingleValuedNullable() { + testBuild(between(1_000, 50_000), true, 1); + } + + public void testBuildSmallMultiValued() { + testBuild(between(1, 100), false, 3); + } + + public void testBuildHugeMultiValued() { + testBuild(between(1_000, 50_000), false, 3); + } + + public void testBuildSmallMultiValuedNullable() { + testBuild(between(1, 100), true, 3); + } + + public void testBuildHugeMultiValuedNullable() { + testBuild(between(1_000, 50_000), true, 3); + } + + public void testBuildSingle() { + testBuild(1, false, 1); + } + + private void testBuild(int size, boolean nullable, int maxValueCount) { + try (Block.Builder builder = elementType.newBlockBuilder(randomBoolean() ? 
size : 1, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, nullable, 1, maxValueCount, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + assertThat(built, equalTo(random.block())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testDoubleBuild() { + try (Block.Builder builder = elementType.newBlockBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + assertThat(built, equalTo(random.block())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + Exception e = expectThrows(IllegalStateException.class, builder::build); + assertThat(e.getMessage(), equalTo("already closed")); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testCranky() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BlockFactory blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + for (int i = 0; i < 100; i++) { + try { + try (Block.Builder builder = elementType.newBlockBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + assertThat(built, equalTo(random.block())); + } + } + // If we made it this far cranky didn't fail us! 
+ } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + } + + public void testCrankyConstantBlock() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BlockFactory blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + for (int i = 0; i < 100; i++) { + try { + try (Block.Builder builder = elementType.newBlockBuilder(randomInt(10), blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 1, false, 1, 1, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + assertThat(built.asVector().isConstant(), is(true)); + assertThat(built, equalTo(random.block())); + } + } + // If we made it this far cranky didn't fail us! 
+ } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java index a524221dd50d7..9c6c9d966b3f6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java @@ -49,11 +49,19 @@ public static BlockFactory blockFactory(ByteSizeValue size) { @ParametersFactory public static List params() { - List> l = List.of(() -> { - CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); - return BlockFactory.getInstance(breaker, bigArrays); - }, BlockFactory::getGlobalInstance); + List> l = List.of(new Supplier<>() { + @Override + public BlockFactory get() { + CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); + return BlockFactory.getInstance(breaker, bigArrays); + } + + @Override + public String toString() { + return "1gb"; + } + }); return l.stream().map(s -> new Object[] { s }).toList(); } @@ -555,13 +563,16 @@ static Block.MvOrdering randomOrdering() { } void releaseAndAssertBreaker(T data) { + Page page = data instanceof Block block ? 
new Page(block) : null; assertThat(breaker.getUsed(), greaterThan(0L)); Releasables.closeExpectNoException(data); if (data instanceof Block block) { assertThat(block.isReleased(), is(true)); - Page page = new Page(block); - var e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); + Exception e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); assertThat(e.getMessage(), containsString("can't read released block")); + + e = expectThrows(IllegalArgumentException.class, () -> new Page(block)); + assertThat(e.getMessage(), containsString("can't build page out of released blocks")); } assertThat(breaker.getUsed(), is(0L)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java index a5637128705ca..dd61c8f6478d3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java @@ -77,6 +77,7 @@ public static void readInto(List> values, Page page) { for (int i = 0; i < page.getBlockCount(); i++) { readInto(values.get(i), page.getBlock(i)); } + page.releaseBlocks(); } public static void readInto(List values, Block block) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java index 0eb9beec2e7f9..ee654497c1ec3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java @@ -18,7 +18,6 @@ import java.util.Arrays; import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class 
BytesRefBlockEqualityTests extends ESTestCase { @@ -332,10 +331,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - BytesRefBlock block1 = builder.build(); - BytesRefBlock block2 = builder.build(); + BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + BytesRefBlock block1 = builder1.build(); + BytesRefBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -365,15 +368,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder3 = BytesRefBlock.newBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendBytesRef(new BytesRef(Integer.toHexString(randomInt())))); + for (int i = 0; i < values; i++) { + BytesRef value = new BytesRef(Integer.toHexString(randomInt())); + builder1.appendBytesRef(value); + builder2.appendBytesRef(value); + builder3.appendBytesRef(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - BytesRefBlock block1 = builder.build(); - BytesRefBlock block2 = builder.build(); - BytesRefBlock block3 = builder.build(); + BytesRefBlock block1 = builder1.build(); + BytesRefBlock block2 = builder2.build(); + BytesRefBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java index d8258ab28a078..465dc95a15ea4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; @@ -98,47 +99,55 @@ public void testRandomShardSegmentDocMap() { } private void assertShardSegmentDocMap(int[][] data, int[][] expected) { - DocBlock.Builder builder = DocBlock.newBlockBuilder(data.length); - for (int r = 0; r < data.length; r++) { - builder.appendShard(data[r][0]); - 
builder.appendSegment(data[r][1]); - builder.appendDoc(data[r][2]); + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (DocBlock.Builder builder = DocBlock.newBlockBuilder(data.length, blockFactory)) { + for (int r = 0; r < data.length; r++) { + builder.appendShard(data[r][0]); + builder.appendSegment(data[r][1]); + builder.appendDoc(data[r][2]); + } + try (DocVector docVector = builder.build().asVector()) { + int[] forwards = docVector.shardSegmentDocMapForwards(); + + int[][] result = new int[docVector.getPositionCount()][]; + for (int p = 0; p < result.length; p++) { + result[p] = new int[] { + docVector.shards().getInt(forwards[p]), + docVector.segments().getInt(forwards[p]), + docVector.docs().getInt(forwards[p]) }; + } + assertThat(result, equalTo(expected)); + + int[] backwards = docVector.shardSegmentDocMapBackwards(); + for (int p = 0; p < result.length; p++) { + result[p] = new int[] { + docVector.shards().getInt(backwards[forwards[p]]), + docVector.segments().getInt(backwards[forwards[p]]), + docVector.docs().getInt(backwards[forwards[p]]) }; + } + + assertThat(result, equalTo(data)); + } } - DocVector docVector = builder.build().asVector(); - int[] forwards = docVector.shardSegmentDocMapForwards(); - - int[][] result = new int[docVector.getPositionCount()][]; - for (int p = 0; p < result.length; p++) { - result[p] = new int[] { - docVector.shards().getInt(forwards[p]), - docVector.segments().getInt(forwards[p]), - docVector.docs().getInt(forwards[p]) }; - } - assertThat(result, equalTo(expected)); - - int[] backwards = docVector.shardSegmentDocMapBackwards(); - for (int p = 0; p < result.length; p++) { - result[p] = new int[] { - docVector.shards().getInt(backwards[forwards[p]]), - docVector.segments().getInt(backwards[forwards[p]]), - docVector.docs().getInt(backwards[forwards[p]]) }; - } - - assertThat(result, equalTo(data)); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } public void 
testCannotDoubleRelease() { var block = new DocVector(IntVector.range(0, 2), IntBlock.newConstantBlockWith(0, 2).asVector(), IntVector.range(0, 2), null) .asBlock(); assertThat(block.isReleased(), is(false)); + Page page = new Page(block); + Releasables.closeExpectNoException(block); assertThat(block.isReleased(), is(true)); - var ex = expectThrows(IllegalStateException.class, () -> block.close()); - assertThat(ex.getMessage(), containsString("can't release already released block")); + Exception e = expectThrows(IllegalStateException.class, () -> block.close()); + assertThat(e.getMessage(), containsString("can't release already released block")); - Page page = new Page(block); - var e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); + e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); assertThat(e.getMessage(), containsString("can't read released block")); + + e = expectThrows(IllegalArgumentException.class, () -> new Page(block)); + assertThat(e.getMessage(), containsString("can't build page out of released blocks")); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java index 2abbcc0b989f1..7dda97f52834e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java @@ -11,7 +11,6 @@ import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class DoubleBlockEqualityTests extends ESTestCase { @@ -224,10 +223,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = DoubleBlock.newBlockBuilder(grow ? 
0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - DoubleBlock block1 = builder.build(); - DoubleBlock block2 = builder.build(); + DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + DoubleBlock block1 = builder1.build(); + DoubleBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -248,15 +251,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder3 = DoubleBlock.newBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendDouble(randomDouble())); + for (int i = 0; i < values; i++) { + double value = randomDouble(); + builder1.appendDouble(value); + builder2.appendDouble(value); + builder3.appendDouble(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - DoubleBlock block1 = builder.build(); - DoubleBlock block2 = builder.build(); - DoubleBlock block3 = builder.build(); + DoubleBlock block1 = builder1.build(); + DoubleBlock block2 = builder2.build(); + DoubleBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java index c4e19106d4368..40c84324f13d2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java @@ -11,7 +11,6 @@ import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class IntBlockEqualityTests extends ESTestCase { @@ -185,10 +184,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = IntBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - IntBlock block1 = builder.build(); - IntBlock block2 = builder.build(); + IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 
0 : positions); + IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + IntBlock block1 = builder1.build(); + IntBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -210,15 +213,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder3 = IntBlock.newBlockBuilder(grow ? 0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendInt(randomInt())); + for (int i = 0; i < values; i++) { + int value = randomInt(); + builder1.appendInt(value); + builder2.appendInt(value); + builder3.appendInt(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - IntBlock block1 = builder.build(); - IntBlock block2 = builder.build(); - IntBlock block3 = builder.build(); + IntBlock block1 = builder1.build(); + IntBlock block2 = builder2.build(); + IntBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java index 3d08b2a96d635..a24b4a4dd6fa6 100644 
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java @@ -11,7 +11,6 @@ import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class LongBlockEqualityTests extends ESTestCase { @@ -191,10 +190,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = LongBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - LongBlock block1 = builder.build(); - LongBlock block2 = builder.build(); + LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + LongBlock block1 = builder1.build(); + LongBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -216,15 +219,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder3 = LongBlock.newBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); - int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendLong(randomLong())); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); + int valueCount = randomIntBetween(1, 16); + for (int i = 0; i < valueCount; i++) { + long value = randomLong(); + builder1.appendLong(value); + builder2.appendLong(value); + builder3.appendLong(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - LongBlock block1 = builder.build(); - LongBlock block2 = builder.build(); - LongBlock block3 = builder.build(); + LongBlock block1 = builder1.build(); + LongBlock block2 = builder2.build(); + LongBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java index 4684da93a661a..d9377a490368d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java @@ -139,6 +139,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public IntBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestLongBlockBuilder extends TestBlockBuilder { @@ -195,6 +200,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public LongBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestDoubleBlockBuilder extends TestBlockBuilder { @@ -251,6 +261,11 @@ public Block.Builder 
appendAllValuesToCurrentPosition(Block block) { public DoubleBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestBytesRefBlockBuilder extends TestBlockBuilder { @@ -307,6 +322,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public BytesRefBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestBooleanBlockBuilder extends TestBlockBuilder { @@ -366,5 +386,10 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public BooleanBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java new file mode 100644 index 0000000000000..04ccf47ea6122 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java @@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.indices.CrankyCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class VectorBuilderTests extends ESTestCase { + @ParametersFactory + public static List params() { + List params = new ArrayList<>(); + for (ElementType elementType : ElementType.values()) { + if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + continue; + } + params.add(new Object[] { elementType }); + } + return params; + } + + private final ElementType elementType; + + public VectorBuilderTests(ElementType elementType) { + this.elementType = elementType; + } + + public void testCloseWithoutBuilding() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + vectorBuilder(10, blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testBuildSmall() { + testBuild(between(1, 100)); + } + + public void testBuildHuge() { + testBuild(between(1_000, 50_000)); + } + + public void testBuildSingle() { + testBuild(1); + } + + private void testBuild(int size) { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Vector.Builder builder = vectorBuilder(randomBoolean() ? 
size : 1, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testDoubleBuild() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + Exception e = expectThrows(IllegalStateException.class, builder::build); + assertThat(e.getMessage(), equalTo("already closed")); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testCranky() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BlockFactory blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + for (int i = 0; i < 100; i++) { + try { + try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + } + // If we made it this far cranky didn't fail us! 
+ } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + } + + private Vector.Builder vectorBuilder(int estimatedSize, BlockFactory blockFactory) { + return switch (elementType) { + case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case BOOLEAN -> BooleanVector.newVectorBuilder(estimatedSize, blockFactory); + case BYTES_REF -> BytesRefVector.newVectorBuilder(estimatedSize, blockFactory); + case DOUBLE -> DoubleVector.newVectorBuilder(estimatedSize, blockFactory); + case INT -> IntVector.newVectorBuilder(estimatedSize, blockFactory); + case LONG -> LongVector.newVectorBuilder(estimatedSize, blockFactory); + }; + } + + private void fill(Vector.Builder builder, Vector from) { + switch (elementType) { + case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case BOOLEAN -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((BooleanVector.Builder) builder).appendBoolean(((BooleanVector) from).getBoolean(p)); + } + } + case BYTES_REF -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((BytesRefVector.Builder) builder).appendBytesRef(((BytesRefVector) from).getBytesRef(p, new BytesRef())); + } + } + case DOUBLE -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((DoubleVector.Builder) builder).appendDouble(((DoubleVector) from).getDouble(p)); + } + } + case INT -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((IntVector.Builder) builder).appendInt(((IntVector) from).getInt(p)); + } + } + case LONG -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((LongVector.Builder) builder).appendLong(((LongVector) from).getLong(p)); + } + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java index 9fa9f7e32c654..3c46fef7e5257 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java @@ -45,6 +45,12 @@ public VectorFixedBuilderTests(ElementType elementType) { this.elementType = elementType; } + public void testCloseWithoutBuilding() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + vectorBuilder(10, blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + public void testBuildSmall() { testBuild(between(1, 100)); } @@ -59,43 +65,52 @@ public void testBuildSingle() { private void testBuild(int size) { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); - Vector.Builder builder = vectorBuilder(size, blockFactory); - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); - fill(builder, random.block().asVector()); - try (Vector built = builder.build()) { - assertThat(built, equalTo(random.block().asVector())); - assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + try (Vector.Builder builder = vectorBuilder(size, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } public void testDoubleBuild() { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); - Vector.Builder builder = 
vectorBuilder(10, blockFactory); - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); - fill(builder, random.block().asVector()); - try (Vector built = builder.build()) { - assertThat(built, equalTo(random.block().asVector())); + try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + Exception e = expectThrows(IllegalStateException.class, builder::build); + assertThat(e.getMessage(), equalTo("already closed")); } - Exception e = expectThrows(IllegalStateException.class, builder::build); - assertThat(e.getMessage(), equalTo("already closed")); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } public void testCranky() { BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); BlockFactory blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); - try { - Vector.Builder builder = vectorBuilder(10, blockFactory); - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); - fill(builder, random.block().asVector()); - try (Vector built = builder.build()) { - assertThat(built, equalTo(random.block().asVector())); + for (int i = 0; i < 100; i++) { + try { + Vector.Builder builder = vectorBuilder(10, blockFactory); + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + // If we made it this far cranky didn't fail us! 
+ } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } - // If we made it this far cranky didn't fail us! - } catch (CircuitBreakingException e) { - logger.info("cranky", e); - assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } - assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } private Vector.Builder vectorBuilder(int size, BlockFactory blockFactory) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java new file mode 100644 index 0000000000000..9893cd2b2a023 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java @@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.lucene; + +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.AnyOperatorTestCase; +import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.OperatorTestCase; +import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.internal.SearchContext; +import org.junit.After; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class LuceneCountOperatorTests extends AnyOperatorTestCase { + private Directory directory = newDirectory(); + private IndexReader reader; + + @After + public void closeIndex() throws IOException { + IOUtils.close(reader, directory); + } + + @Override + protected LuceneCountOperator.Factory simple(BigArrays bigArrays) { + return simple(bigArrays, 
randomFrom(DataPartitioning.values()), between(1, 10_000), 100); + } + + private LuceneCountOperator.Factory simple(BigArrays bigArrays, DataPartitioning dataPartitioning, int numDocs, int limit) { + int commitEvery = Math.max(1, numDocs / 10); + try ( + RandomIndexWriter writer = new RandomIndexWriter( + random(), + directory, + newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE) + ) + ) { + for (int d = 0; d < numDocs; d++) { + List doc = new ArrayList<>(); + doc.add(new SortedNumericDocValuesField("s", d)); + writer.addDocument(doc); + if (d % commitEvery == 0) { + writer.commit(); + } + } + reader = writer.getReader(); + } catch (IOException e) { + throw new RuntimeException(e); + } + + SearchContext ctx = mockSearchContext(reader); + SearchExecutionContext ectx = mock(SearchExecutionContext.class); + when(ctx.getSearchExecutionContext()).thenReturn(ectx); + when(ectx.getIndexReader()).thenReturn(reader); + Function queryFunction = c -> new MatchAllDocsQuery(); + return new LuceneCountOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, limit); + } + + @Override + protected String expectedToStringOfSimple() { + assumeFalse("can't support variable maxPageSize", true); // TODO allow testing this + return "LuceneCountOperator[shardId=0, maxPageSize=**random**]"; + } + + @Override + protected String expectedDescriptionOfSimple() { + assumeFalse("can't support variable maxPageSize", true); // TODO allow testing this + return """ + LuceneCountOperator[dataPartitioning = SHARD, maxPageSize = **random**, limit = 100, sorts = [{"s":{"order":"asc"}}]]"""; + } + + // TODO tests for the other data partitioning configurations + + public void testShardDataPartitioning() { + int size = between(1_000, 20_000); + int limit = between(10, size); + testCount(size, limit); + } + + public void testEmpty() { + testCount(0, between(10, 10_000)); + } + + private void testCount(int size, int limit) { + DriverContext ctx = driverContext(); + 
LuceneCountOperator.Factory factory = simple(nonBreakingBigArrays(), DataPartitioning.SHARD, size, limit); + + List results = new ArrayList<>(); + OperatorTestCase.runDriver(new Driver(ctx, factory.get(ctx), List.of(), new PageConsumerOperator(results::add), () -> {})); + OperatorTestCase.assertDriverContext(ctx); + + assertThat(results, hasSize(1)); + Page page = results.get(0); + + assertThat(page.getPositionCount(), is(1)); + assertThat(page.getBlockCount(), is(2)); + LongBlock lb = page.getBlock(0); + assertThat(lb.getPositionCount(), is(1)); + assertThat(lb.getLong(0), is((long) Math.min(size, limit))); + BooleanBlock bb = page.getBlock(1); + assertThat(bb.getBoolean(1), is(true)); + } + + /** + * Creates a mock search context with the given index reader. + * The returned mock search context can be used to test with {@link LuceneOperator}. + */ + public static SearchContext mockSearchContext(IndexReader reader) { + try { + ContextIndexSearcher searcher = new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + TrivialQueryCachingPolicy.NEVER, + true + ); + SearchContext searchContext = mock(SearchContext.class); + when(searchContext.searcher()).thenReturn(searcher); + return searchContext; + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java index 60d5dd394afb7..fad1f793122d8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java @@ -16,12 +16,12 @@ public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTestCase { public static 
LuceneSourceOperator.Status simple() { - return new LuceneSourceOperator.Status(0, 1, 5, 123, 99990); + return new LuceneSourceOperator.Status(0, 0, 1, 5, 123, 99990, 8000); } public static String simpleToJson() { return """ - {"processed_sliced":0,"total_slices":1,"slice_position":123,"slice_size":99990,"pages_emitted":5}"""; + {"processed_slices":0,"slice_index":0,"total_slices":1,"pages_emitted":5,"slice_min":123,"slice_max":99990,"current":8000}"""; } public void testToXContent() { @@ -40,49 +40,32 @@ public LuceneSourceOperator.Status createTestInstance() { randomNonNegativeInt(), randomNonNegativeInt(), randomNonNegativeInt(), + randomNonNegativeInt(), + randomNonNegativeInt(), randomNonNegativeInt() ); } @Override protected LuceneSourceOperator.Status mutateInstance(LuceneSourceOperator.Status instance) { - return switch (between(0, 4)) { - case 0 -> new LuceneSourceOperator.Status( - randomValueOtherThan(instance.currentLeaf(), ESTestCase::randomNonNegativeInt), - instance.totalLeaves(), - instance.pagesEmitted(), - instance.slicePosition(), - instance.sliceSize() - ); - case 1 -> new LuceneSourceOperator.Status( - instance.currentLeaf(), - randomValueOtherThan(instance.totalLeaves(), ESTestCase::randomNonNegativeInt), - instance.pagesEmitted(), - instance.slicePosition(), - instance.sliceSize() - ); - case 2 -> new LuceneSourceOperator.Status( - instance.currentLeaf(), - instance.totalLeaves(), - randomValueOtherThan(instance.pagesEmitted(), ESTestCase::randomNonNegativeInt), - instance.slicePosition(), - instance.sliceSize() - ); - case 3 -> new LuceneSourceOperator.Status( - instance.currentLeaf(), - instance.totalLeaves(), - instance.pagesEmitted(), - randomValueOtherThan(instance.slicePosition(), ESTestCase::randomNonNegativeInt), - instance.sliceSize() - ); - case 4 -> new LuceneSourceOperator.Status( - instance.currentLeaf(), - instance.totalLeaves(), - instance.pagesEmitted(), - instance.slicePosition(), - randomValueOtherThan(instance.sliceSize(), 
ESTestCase::randomNonNegativeInt) - ); + int processedSlices = instance.processedSlices(); + int sliceIndex = instance.sliceIndex(); + int totalSlices = instance.totalSlices(); + int pagesEmitted = instance.pagesEmitted(); + int sliceMin = instance.sliceMin(); + int sliceMax = instance.sliceMax(); + int current = instance.current(); + switch (between(0, 6)) { + case 0 -> processedSlices = randomValueOtherThan(processedSlices, ESTestCase::randomNonNegativeInt); + case 1 -> sliceIndex = randomValueOtherThan(sliceIndex, ESTestCase::randomNonNegativeInt); + case 2 -> totalSlices = randomValueOtherThan(totalSlices, ESTestCase::randomNonNegativeInt); + case 3 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt); + case 4 -> sliceMin = randomValueOtherThan(sliceMin, ESTestCase::randomNonNegativeInt); + case 5 -> sliceMax = randomValueOtherThan(sliceMax, ESTestCase::randomNonNegativeInt); + case 6 -> current = randomValueOtherThan(current, ESTestCase::randomNonNegativeInt); default -> throw new UnsupportedOperationException(); - }; + } + ; + return new LuceneSourceOperator.Status(processedSlices, sliceIndex, totalSlices, pagesEmitted, sliceMin, sliceMax, current); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 64edcaa43d89b..4c0e33e5cfb82 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; 
import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; @@ -115,7 +116,7 @@ static Operator.OperatorFactory factory(IndexReader reader, ValuesSourceType vsT } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { // The test wants more than one segment. We shoot for about 10. int commitEvery = Math.max(1, size / 10); try ( @@ -198,21 +199,35 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { } public void testLoadAll() { - loadSimpleAndAssert(CannedSourceOperator.collectPages(simpleInput(between(1_000, 100 * 1024)))); + DriverContext driverContext = driverContext(); + loadSimpleAndAssert( + driverContext, + CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100 * 1024))) + ); } public void testLoadAllInOnePage() { + DriverContext driverContext = driverContext(); loadSimpleAndAssert( - List.of(CannedSourceOperator.mergePages(CannedSourceOperator.collectPages(simpleInput(between(1_000, 100 * 1024))))) + driverContext, + List.of( + CannedSourceOperator.mergePages( + CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100 * 1024))) + ) + ) ); } public void testEmpty() { - loadSimpleAndAssert(CannedSourceOperator.collectPages(simpleInput(0))); + DriverContext driverContext = driverContext(); + loadSimpleAndAssert(driverContext, CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 0))); } public void testLoadAllInOnePageShuffled() { - Page source = CannedSourceOperator.mergePages(CannedSourceOperator.collectPages(simpleInput(between(1_000, 100 * 1024)))); + DriverContext driverContext = driverContext(); + Page source = CannedSourceOperator.mergePages( + CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100 * 1024))) + ); List shuffleList = new ArrayList<>(); IntStream.range(0, 
source.getPositionCount()).forEach(i -> shuffleList.add(i)); Randomness.shuffle(shuffleList); @@ -222,11 +237,10 @@ public void testLoadAllInOnePageShuffled() { shuffledBlocks[b] = source.getBlock(b).filter(shuffleArray); } source = new Page(shuffledBlocks); - loadSimpleAndAssert(List.of(source)); + loadSimpleAndAssert(driverContext, List.of(source)); } - private void loadSimpleAndAssert(List input) { - DriverContext driverContext = driverContext(); + private void loadSimpleAndAssert(DriverContext driverContext, List input) { List results = new ArrayList<>(); List operators = List.of( factory( @@ -314,7 +328,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvKeywords.getBytesRef(offset + v, new BytesRef()).utf8ToString(), equalTo(PREFIX[v] + key)); } if (key % 3 > 0) { - assertThat(mvKeywords.mvOrdering(), equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvKeywords.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } assertThat(bools.getBoolean(i), equalTo(key % 2 == 0)); @@ -324,7 +338,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvBools.getBoolean(offset + v), equalTo(BOOLEANS[key % 3][v])); } if (key % 3 > 0) { - assertThat(mvBools.mvOrdering(), equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvBools.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } assertThat(mvInts.getValueCount(i), equalTo(key % 3 + 1)); @@ -333,7 +347,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvInts.getInt(offset + v), equalTo(1_000 * key + v)); } if (key % 3 > 0) { - assertThat(mvInts.mvOrdering(), equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvInts.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } assertThat(mvLongs.getValueCount(i), equalTo(key % 3 + 1)); @@ -342,7 +356,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvLongs.getLong(offset + v), equalTo(-1_000L * key + v)); } if (key % 3 > 0) { - assertThat(mvLongs.mvOrdering(), 
equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvLongs.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } assertThat(doubles.getDouble(i), equalTo(key / 123_456d)); @@ -351,7 +365,7 @@ private void loadSimpleAndAssert(List input) { assertThat(mvDoubles.getDouble(offset + v), equalTo(key / 123_456d + v)); } if (key % 3 > 0) { - assertThat(mvDoubles.mvOrdering(), equalTo(Block.MvOrdering.ASCENDING)); + assertThat(mvDoubles.mvOrdering(), equalTo(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING)); } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java index 9eaa1e333f66e..784d5134e9608 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import java.util.List; @@ -28,9 +29,9 @@ public class AggregationOperatorTests extends ForkingOperatorTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java index 8f995d9a31bc3..5d8fa81e565a7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java @@ -17,6 +17,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; +import org.junit.After; + +import java.util.ArrayList; +import java.util.List; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.matchesPattern; @@ -95,7 +99,26 @@ protected final BigArrays nonBreakingBigArrays() { /** * A {@link DriverContext} with a nonBreakingBigArrays. */ - protected DriverContext driverContext() { + protected DriverContext driverContext() { // TODO make this final and return a breaking block factory return new DriverContext(nonBreakingBigArrays(), BlockFactory.getNonBreakingInstance()); } + + private final List breakers = new ArrayList<>(); + + /** + * A {@link DriverContext} with a breaking {@link BigArrays} and {@link BlockFactory}. 
+ */ + protected DriverContext breakingDriverContext() { // TODO move this to driverContext once everyone supports breaking + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); + } + + @After + public void allBreakersEmpty() { + for (CircuitBreaker breaker : breakers) { + assertThat(breaker.getUsed(), equalTo(0L)); + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java index d5b07a713b8b4..7a77d6bbb082c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java @@ -26,6 +26,10 @@ public static List collectPages(SourceOperator source) { if (in == null) { continue; } + if (in.getPositionCount() == 0) { + in.releaseBlocks(); + continue; + } pages.add(in); } return pages; @@ -53,6 +57,25 @@ public static Page mergePages(List pages) { return new Page(blocks); } + /** + * Make a deep copy of some pages. Useful so that when the originals are + * released the copies are still live. 
+ */ + public static List deepCopyOf(List pages) { + List out = new ArrayList<>(pages.size()); + for (Page p : pages) { + Block[] blocks = new Block[p.getBlockCount()]; + for (int b = 0; b < blocks.length; b++) { + Block orig = p.getBlock(b); + Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount()); + builder.copyFrom(orig, 0, p.getPositionCount()); + blocks[b] = builder.build(); + } + out.add(new Page(blocks)); + } + return out; + } + private final Iterator page; public CannedSourceOperator(Iterator page) { @@ -77,5 +100,9 @@ public Page getOutput() { } @Override - public void close() {} + public void close() { + while (page.hasNext()) { + page.next().releaseBlocks(); + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java index 7825e035df0db..8e0be216ed477 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; @@ -24,7 +25,7 @@ public class ColumnExtractOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List input = LongStream.range(0, end) .mapToObj(l -> new BytesRef("word1_" + l + " word2_" + l + " word3_" + l)) .collect(Collectors.toList()); @@ -51,8 +52,8 @@ protected Operator.OperatorFactory 
simple(BigArrays bigArrays) { new ElementType[] { ElementType.BYTES_REF }, dvrCtx -> new EvalOperator.ExpressionEvaluator() { @Override - public Block eval(Page page) { - return page.getBlock(0); + public Block.Ref eval(Page page) { + return new Block.Ref(page.getBlock(0), page); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index 156f37d8d8e7a..720e0089f3f26 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -10,31 +10,53 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Tuple; import java.util.List; +import java.util.Set; +import java.util.TreeSet; +import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.LongStream; +import static org.hamcrest.Matchers.equalTo; + public class EvalOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { - return new TupleBlockSourceOperator(LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { + return new TupleBlockSourceOperator(blockFactory, LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); } - record Addition(int lhs, int rhs) implements EvalOperator.ExpressionEvaluator { + record Addition(DriverContext driverContext, int lhs, int rhs) implements EvalOperator.ExpressionEvaluator { 
@Override - public Block eval(Page page) { + public Block.Ref eval(Page page) { LongVector lhsVector = page.getBlock(0).asVector(); LongVector rhsVector = page.getBlock(1).asVector(); - LongVector.Builder result = LongVector.newVectorBuilder(page.getPositionCount()); - for (int p = 0; p < page.getPositionCount(); p++) { - result.appendLong(lhsVector.getLong(p) + rhsVector.getLong(p)); + try (LongVector.FixedBuilder result = LongVector.newVectorFixedBuilder(page.getPositionCount(), driverContext.blockFactory())) { + for (int p = 0; p < page.getPositionCount(); p++) { + result.appendLong(lhsVector.getLong(p) + rhsVector.getLong(p)); + } + return Block.Ref.floating(result.build().asBlock()); } - return result.build().asBlock(); + } + + @Override + public String toString() { + return "Addition[lhs=" + lhs + ", rhs=" + rhs + ']'; + } + + @Override + public void close() {} + } + + record LoadFromPage(int channel) implements EvalOperator.ExpressionEvaluator { + @Override + public Block.Ref eval(Page page) { + return new Block.Ref(page.getBlock(channel), page); } @Override @@ -43,7 +65,7 @@ public void close() {} @Override protected Operator.OperatorFactory simple(BigArrays bigArrays) { - return new EvalOperator.EvalOperatorFactory(dvrCtx -> new Addition(0, 1)); + return new EvalOperator.EvalOperatorFactory(dvrCtx -> new Addition(dvrCtx, 0, 1)); } @Override @@ -67,9 +89,27 @@ protected void assertSimpleOutput(List input, List results) { } } + public void testReadFromBlock() { + DriverContext context = driverContext(); + List input = CannedSourceOperator.collectPages(simpleInput(context.blockFactory(), 10)); + List results = drive(new EvalOperator.EvalOperatorFactory(dvrCtx -> new LoadFromPage(0)).get(context), input.iterator()); + Set found = new TreeSet<>(); + for (var page : results) { + LongBlock lb = page.getBlock(2); + IntStream.range(0, lb.getPositionCount()).forEach(pos -> found.add(lb.getLong(pos))); + } + assertThat(found, equalTo(LongStream.range(0, 
10).mapToObj(Long::valueOf).collect(Collectors.toSet()))); + results.forEach(Page::releaseBlocks); + assertThat(context.breaker().getUsed(), equalTo(0L)); + } + @Override protected ByteSizeValue smallEnoughToCircuitBreak() { - assumeTrue("doesn't use big arrays so can't break", false); - return null; + return ByteSizeValue.ofBytes(between(1, 8000)); + } + + @Override + protected DriverContext driverContext() { // TODO remove this when the parent uses a breaking block factory + return breakingDriverContext(); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index b26fe0c33fe1c..637018487a95d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -10,33 +10,42 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Tuple; +import java.util.ArrayList; import java.util.List; +import java.util.stream.IntStream; import java.util.stream.LongStream; import static org.hamcrest.Matchers.equalTo; public class FilterOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { - return new TupleBlockSourceOperator(LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { + return new 
TupleBlockSourceOperator(blockFactory, LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); } - record SameLastDigit(int lhs, int rhs) implements EvalOperator.ExpressionEvaluator { + record SameLastDigit(DriverContext context, int lhs, int rhs) implements EvalOperator.ExpressionEvaluator { @Override - public Block eval(Page page) { + public Block.Ref eval(Page page) { LongVector lhsVector = page.getBlock(0).asVector(); LongVector rhsVector = page.getBlock(1).asVector(); - BooleanVector.Builder result = BooleanVector.newVectorBuilder(page.getPositionCount()); + BooleanVector.FixedBuilder result = BooleanVector.newVectorFixedBuilder(page.getPositionCount(), context.blockFactory()); for (int p = 0; p < page.getPositionCount(); p++) { result.appendBoolean(lhsVector.getLong(p) % 10 == rhsVector.getLong(p) % 10); } - return result.build().asBlock(); + return Block.Ref.floating(result.build().asBlock()); + } + + @Override + public String toString() { + return "SameLastDigit[lhs=" + lhs + ", rhs=" + rhs + ']'; } @Override @@ -45,7 +54,7 @@ public void close() {} @Override protected Operator.OperatorFactory simple(BigArrays bigArrays) { - return new FilterOperator.FilterOperatorFactory(dvrCtx -> new SameLastDigit(0, 1)); + return new FilterOperator.FilterOperatorFactory(dvrCtx -> new SameLastDigit(dvrCtx, 0, 1)); } @Override @@ -82,9 +91,36 @@ protected void assertSimpleOutput(List input, List results) { assertThat(actualCount, equalTo(expectedCount)); } + public void testNoResults() { + assertSimple(driverContext(), 3); + } + + public void testReadFromBlock() { + DriverContext context = driverContext(); + List input = CannedSourceOperator.collectPages( + new SequenceBooleanBlockSourceOperator(context.blockFactory(), List.of(true, false, true, false)) + ); + List results = drive( + new FilterOperator.FilterOperatorFactory(dvrCtx -> new EvalOperatorTests.LoadFromPage(0)).get(context), + input.iterator() + ); + List found = new ArrayList<>(); + for (var page : 
results) { + BooleanVector lb = page.getBlock(0).asVector(); + IntStream.range(0, lb.getPositionCount()).forEach(pos -> found.add(lb.getBoolean(pos))); + } + assertThat(found, equalTo(List.of(true, true))); + results.forEach(Page::releaseBlocks); + assertThat(context.breaker().getUsed(), equalTo(0L)); + } + @Override protected ByteSizeValue smallEnoughToCircuitBreak() { - assumeTrue("doesn't use big arrays so can't break", false); - return null; + return ByteSizeValue.ofBytes(between(1, 600)); + } + + @Override + protected DriverContext driverContext() { // TODO remove this when the parent uses a breaking block factory + return breakingDriverContext(); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 9d1084fcc4cf3..d01a5b17ac788 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -56,7 +56,7 @@ protected final Operator.OperatorFactory simple(BigArrays bigArrays) { public final void testInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); try ( @@ -80,7 +80,7 @@ public final void testInitialFinal() { public final void testManyInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 
between(1_000, 100_000))); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); List results = new ArrayList<>(); try ( @@ -101,7 +101,7 @@ public final void testManyInitialFinal() { public final void testInitialIntermediateFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); try ( @@ -127,7 +127,7 @@ public final void testInitialIntermediateFinal() { public final void testManyInitialManyPartialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); Collections.shuffle(partials, random()); @@ -156,7 +156,7 @@ public final void testManyInitialManyPartialFinal() { // to move the data through the pipeline. public final void testManyInitialManyPartialFinalRunner() { BigArrays bigArrays = nonBreakingBigArrays(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext().blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); List drivers = createDriversForInput(bigArrays, input, results, false /* no throwing ops */); @@ -178,7 +178,7 @@ protected void start(Driver driver, ActionListener listener) { // runner behaves correctly and also releases all resources (bigArrays) appropriately. 
public final void testManyInitialManyPartialFinalRunnerThrowing() { BigArrays bigArrays = nonBreakingBigArrays(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext().blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); List drivers = createDriversForInput(bigArrays, input, results, true /* one throwing op */); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java index 954a1f179f259..1afa5d3c02330 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongGroupingAggregatorFunctionTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; @@ -31,9 +32,12 @@ public class HashAggregationOperatorTests extends ForkingOperatorTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new TupleBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(l % 5, randomLongBetween(-max, max)))); + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(l % 5, randomLongBetween(-max, max))) + ); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index 228fdf262cf62..bbbfd44014ffc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import java.util.List; @@ -24,8 +25,8 @@ protected Operator.OperatorFactory simple(BigArrays bigArrays) { } @Override - protected SourceOperator simpleInput(int size) { - return new SequenceLongBlockSourceOperator(LongStream.range(0, size)); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java index e45b93b3180dc..370412714157a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefHash; @@ -17,12 +18,8 @@ import 
org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockTestUtils; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matcher; @@ -100,17 +97,17 @@ public MultivalueDedupeTests( public void testDedupeAdaptive() { BasicBlockTests.RandomBlock b = randomBlock(); - assertDeduped(b, MultivalueDedupe.dedupeToBlockAdaptive(b.block())); + assertDeduped(b, MultivalueDedupe.dedupeToBlockAdaptive(Block.Ref.floating(b.block()))); } public void testDedupeViaCopyAndSort() { BasicBlockTests.RandomBlock b = randomBlock(); - assertDeduped(b, MultivalueDedupe.dedupeToBlockUsingCopyAndSort(b.block())); + assertDeduped(b, MultivalueDedupe.dedupeToBlockUsingCopyAndSort(Block.Ref.floating(b.block()))); } public void testDedupeViaCopyMissing() { BasicBlockTests.RandomBlock b = randomBlock(); - assertDeduped(b, MultivalueDedupe.dedupeToBlockUsingCopyMissing(b.block())); + assertDeduped(b, MultivalueDedupe.dedupeToBlockUsingCopyMissing(Block.Ref.floating(b.block()))); } private BasicBlockTests.RandomBlock randomBlock() { @@ -125,13 +122,13 @@ private BasicBlockTests.RandomBlock randomBlock() { ); } - private void assertDeduped(BasicBlockTests.RandomBlock b, Block deduped) { + private void assertDeduped(BasicBlockTests.RandomBlock b, Block.Ref deduped) { for (int p = 0; p < b.block().getPositionCount(); p++) { List v = b.values().get(p); Matcher matcher = v == null ? 
nullValue() : containsInAnyOrder(v.stream().collect(Collectors.toSet()).stream().sorted().toArray()); - BlockTestUtils.assertPositionValues(deduped, p, matcher); + BlockTestUtils.assertPositionValues(deduped.block(), p, matcher); } } @@ -200,7 +197,7 @@ public void testHashWithPreviousValues() { public void testBatchEncodeAll() { int initCapacity = Math.toIntExact(ByteSizeValue.ofKb(10).getBytes()); BasicBlockTests.RandomBlock b = randomBlock(); - BatchEncoder encoder = MultivalueDedupe.batchEncoder(b.block(), initCapacity); + var encoder = (BatchEncoder.MVEncoder) MultivalueDedupe.batchEncoder(Block.Ref.floating(b.block()), initCapacity, false); int valueOffset = 0; for (int p = 0, positionOffset = Integer.MAX_VALUE; p < b.block().getPositionCount(); p++, positionOffset++) { @@ -217,7 +214,7 @@ public void testBatchEncodeAll() { public void testBatchEncoderStartSmall() { assumeFalse("Booleans don't grow in the same way", elementType == ElementType.BOOLEAN); BasicBlockTests.RandomBlock b = randomBlock(); - BatchEncoder encoder = MultivalueDedupe.batchEncoder(b.block(), 0); + var encoder = (BatchEncoder.MVEncoder) MultivalueDedupe.batchEncoder(Block.Ref.floating(b.block()), 0, false); /* * We run can't fit the first non-null position into our 0 bytes. 
@@ -252,7 +249,7 @@ private void assertBooleanHash(Set previousValues, BasicBlockTests.Rand if (previousValues.contains(true)) { everSeen[2] = true; } - IntBlock hashes = new MultivalueDedupeBoolean((BooleanBlock) b.block()).hash(everSeen); + IntBlock hashes = new MultivalueDedupeBoolean(Block.Ref.floating(b.block())).hash(everSeen); List hashedValues = new ArrayList<>(); if (everSeen[1]) { hashedValues.add(false); @@ -266,7 +263,7 @@ private void assertBooleanHash(Set previousValues, BasicBlockTests.Rand private void assertBytesRefHash(Set previousValues, BasicBlockTests.RandomBlock b) { BytesRefHash hash = new BytesRefHash(1, BigArrays.NON_RECYCLING_INSTANCE); previousValues.stream().forEach(hash::add); - MultivalueDedupe.HashResult hashes = new MultivalueDedupeBytesRef((BytesRefBlock) b.block()).hash(hash); + MultivalueDedupe.HashResult hashes = new MultivalueDedupeBytesRef(Block.Ref.floating(b.block())).hash(hash); assertThat(hashes.sawNull(), equalTo(b.values().stream().anyMatch(v -> v == null))); assertHash(b, hashes.ords(), hash.size(), previousValues, i -> hash.get(i, new BytesRef())); } @@ -274,7 +271,7 @@ private void assertBytesRefHash(Set previousValues, BasicBlockTests.Ra private void assertIntHash(Set previousValues, BasicBlockTests.RandomBlock b) { LongHash hash = new LongHash(1, BigArrays.NON_RECYCLING_INSTANCE); previousValues.stream().forEach(hash::add); - MultivalueDedupe.HashResult hashes = new MultivalueDedupeInt((IntBlock) b.block()).hash(hash); + MultivalueDedupe.HashResult hashes = new MultivalueDedupeInt(Block.Ref.floating(b.block())).hash(hash); assertThat(hashes.sawNull(), equalTo(b.values().stream().anyMatch(v -> v == null))); assertHash(b, hashes.ords(), hash.size(), previousValues, i -> (int) hash.get(i)); } @@ -282,7 +279,7 @@ private void assertIntHash(Set previousValues, BasicBlockTests.RandomBl private void assertLongHash(Set previousValues, BasicBlockTests.RandomBlock b) { LongHash hash = new LongHash(1, 
BigArrays.NON_RECYCLING_INSTANCE); previousValues.stream().forEach(hash::add); - MultivalueDedupe.HashResult hashes = new MultivalueDedupeLong((LongBlock) b.block()).hash(hash); + MultivalueDedupe.HashResult hashes = new MultivalueDedupeLong(Block.Ref.floating(b.block())).hash(hash); assertThat(hashes.sawNull(), equalTo(b.values().stream().anyMatch(v -> v == null))); assertHash(b, hashes.ords(), hash.size(), previousValues, i -> hash.get(i)); } @@ -290,7 +287,7 @@ private void assertLongHash(Set previousValues, BasicBlockTests.RandomBloc private void assertDoubleHash(Set previousValues, BasicBlockTests.RandomBlock b) { LongHash hash = new LongHash(1, BigArrays.NON_RECYCLING_INSTANCE); previousValues.stream().forEach(d -> hash.add(Double.doubleToLongBits(d))); - MultivalueDedupe.HashResult hashes = new MultivalueDedupeDouble((DoubleBlock) b.block()).hash(hash); + MultivalueDedupe.HashResult hashes = new MultivalueDedupeDouble(Block.Ref.floating(b.block())).hash(hash); assertThat(hashes.sawNull(), equalTo(b.values().stream().anyMatch(v -> v == null))); assertHash(b, hashes.ords(), hash.size(), previousValues, i -> Double.longBitsToDouble(hash.get(i))); } @@ -350,7 +347,9 @@ private int assertEncodedPosition(BasicBlockTests.RandomBlock b, BatchEncoder en Block.Builder builder = elementType.newBlockBuilder(encoder.valueCount(offset)); BytesRef[] toDecode = new BytesRef[encoder.valueCount(offset)]; for (int i = 0; i < toDecode.length; i++) { - toDecode[i] = encoder.read(valueOffset++, new BytesRef()); + BytesRefBuilder dest = new BytesRefBuilder(); + encoder.read(valueOffset++, dest); + toDecode[i] = dest.toBytesRef(); if (b.values().get(position) == null) { // Nulls are encoded as 0 length values assertThat(toDecode[i].length, equalTo(0)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index 
80ac57ed539e7..21ca59e0f45a4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -24,7 +25,7 @@ public class MvExpandOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new AbstractBlockSourceOperator(8 * 1024) { private int idx; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 3b2fac5271aa6..a0bb6c035139b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArray; @@ -21,6 +22,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -36,6 +38,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.in; /** * Base tests for {@link Operator}s that are not {@link SourceOperator} or {@link SinkOperator}. @@ -44,7 +47,7 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { /** * Valid input to be sent to {@link #simple}; */ - protected abstract SourceOperator simpleInput(int size); + protected abstract SourceOperator simpleInput(BlockFactory blockFactory, int size); /** * Assert that output from {@link #simple} is correct for the @@ -80,15 +83,27 @@ public final void testSimpleLargeInput() { * in a sane way. */ public final void testSimpleCircuitBreaking() { - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, smallEnoughToCircuitBreak()); + /* + * We build two CircuitBreakers - one for the input blocks and one for the operation itself. + * The input blocks don't count against the memory usage for the limited operator that we + * build. 
+ */ + DriverContext inputFactoryContext = driverContext(); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, smallEnoughToCircuitBreak()) + .withCircuitBreaking(); + List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); Exception e = expectThrows( CircuitBreakingException.class, - () -> assertSimple(new DriverContext(bigArrays, blockFactory), between(1_000, 10_000)) + () -> drive(simple(bigArrays).get(new DriverContext(bigArrays, blockFactory)), input.iterator()) ); assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + + // Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers. + assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); } /** @@ -98,15 +113,24 @@ public final void testSimpleCircuitBreaking() { * in ctors. 
*/ public final void testSimpleWithCranky() { - CrankyCircuitBreakerService breaker = new CrankyCircuitBreakerService(); - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, breaker).withCircuitBreaking(); - BlockFactory blockFactory = BlockFactory.getInstance(breaker.getBreaker("request"), bigArrays); + DriverContext inputFactoryContext = driverContext(); + List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); + + CrankyCircuitBreakerService cranky = new CrankyCircuitBreakerService(); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, cranky).withCircuitBreaking(); + BlockFactory blockFactory = BlockFactory.getInstance(cranky.getBreaker(CircuitBreaker.REQUEST), bigArrays); try { - assertSimple(new DriverContext(bigArrays, blockFactory), between(1_000, 10_000)); + List result = drive(simple(bigArrays).get(new DriverContext(bigArrays, blockFactory)), input.iterator()); + Releasables.close(() -> Iterators.map(result.iterator(), p -> p::releaseBlocks)); // Either we get lucky and cranky doesn't throw and the test completes or we don't and it throws } catch (CircuitBreakingException e) { + logger.info("broken", e); assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } + + // Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers. 
+ assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); } /** @@ -138,13 +162,14 @@ protected final List oneDriverPerPageList(Iterator> source, Sup return result; } - private void assertSimple(DriverContext context, int size) { - List input = CannedSourceOperator.collectPages(simpleInput(size)); + protected final void assertSimple(DriverContext context, int size) { + List input = CannedSourceOperator.collectPages(simpleInput(context.blockFactory(), size)); + // Clone the input so that the operator can close it, then, later, we can read it again to build the assertion. + List inputClone = CannedSourceOperator.deepCopyOf(input); BigArrays bigArrays = context.bigArrays().withCircuitBreaking(); List results = drive(simple(bigArrays).get(context), input.iterator()); - assertSimpleOutput(input, results); + assertSimpleOutput(inputClone, results); results.forEach(Page::releaseBlocks); - assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); } protected final List drive(Operator operator, Iterator input) { @@ -153,6 +178,7 @@ protected final List drive(Operator operator, Iterator input) { protected final List drive(List operators, Iterator input) { List results = new ArrayList<>(); + boolean success = false; try ( Driver d = new Driver( driverContext(), @@ -163,6 +189,11 @@ protected final List drive(List operators, Iterator input) ) ) { runDriver(d); + success = true; + } finally { + if (success == false) { + Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(results.iterator(), p -> p::releaseBlocks))); + } } return results; } @@ -180,9 +211,13 @@ public static void runDriver(List drivers) { "dummy-session", new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, BlockFactory.getNonBreakingInstance()), () -> "dummy-driver", - new SequenceLongBlockSourceOperator(LongStream.range(0, between(1, 100)), 
between(1, 100)), + new SequenceLongBlockSourceOperator( + BlockFactory.getNonBreakingInstance(), + LongStream.range(0, between(1, 100)), + between(1, 100) + ), List.of(), - new PageConsumerOperator(page -> {}), + new PageConsumerOperator(page -> page.releaseBlocks()), Driver.DEFAULT_STATUS_INTERVAL, () -> {} ) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java index baa7842bdc1f9..59e85390fc522 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java @@ -10,8 +10,6 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; @@ -20,8 +18,6 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Tuple; import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.junit.After; -import org.junit.Before; import java.util.Arrays; import java.util.HashSet; @@ -30,26 +26,14 @@ import java.util.stream.LongStream; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class ProjectOperatorTests extends OperatorTestCase { - - final CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); - final BigArrays bigArrays = new 
MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); - final BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); - - @Before - @After - public void assertBreakerIsZero() { - assertThat(breaker.getUsed(), is(0L)); - } - @Override protected DriverContext driverContext() { - return new DriverContext(blockFactory.bigArrays(), blockFactory); + return breakingDriverContext(); } public void testProjectionOnEmptyPage() { @@ -60,10 +44,11 @@ public void testProjectionOnEmptyPage() { } public void testProjection() { + DriverContext context = driverContext(); var size = randomIntBetween(2, 5); var blocks = new Block[size]; for (int i = 0; i < blocks.length; i++) { - blocks[i] = blockFactory.newConstantIntBlockWith(i, size); + blocks[i] = context.blockFactory().newConstantIntBlockWith(i, size); } var page = new Page(size, blocks); @@ -90,7 +75,7 @@ private List randomProjection(int size) { } @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new TupleBlockSourceOperator(blockFactory, LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java index b85d328271c6f..807bfc2bf0b9b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; @@ -20,14 +21,16 @@ public class 
SequenceBooleanBlockSourceOperator extends AbstractBlockSourceOpera static final int DEFAULT_MAX_PAGE_POSITIONS = 8 * 1024; + private final BlockFactory blockFactory; private final boolean[] values; - public SequenceBooleanBlockSourceOperator(List values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public SequenceBooleanBlockSourceOperator(BlockFactory blockFactory, List values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public SequenceBooleanBlockSourceOperator(List values, int maxPagePositions) { + public SequenceBooleanBlockSourceOperator(BlockFactory blockFactory, List values, int maxPagePositions) { super(maxPagePositions); + this.blockFactory = blockFactory; this.values = new boolean[values.size()]; for (int i = 0; i < values.size(); i++) { this.values[i] = values.get(i); @@ -36,7 +39,7 @@ public SequenceBooleanBlockSourceOperator(List values, int maxPagePosit @Override protected Page createPage(int positionOffset, int length) { - BooleanVector.Builder builder = BooleanVector.newVectorBuilder(length); + BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(length, blockFactory); for (int i = 0; i < length; i++) { builder.appendBoolean(values[positionOffset + i]); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java index 0aa78f3ad0ab3..f7c3ee825d695 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java @@ -21,23 +21,27 @@ public class SequenceLongBlockSourceOperator extends AbstractBlockSourceOperator static final int DEFAULT_MAX_PAGE_POSITIONS = 8 * 1024; + private final BlockFactory blockFactory; + private final long[] values; - public 
SequenceLongBlockSourceOperator(LongStream values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, LongStream values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public SequenceLongBlockSourceOperator(LongStream values, int maxPagePositions) { + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, LongStream values, int maxPagePositions) { super(maxPagePositions); + this.blockFactory = blockFactory; this.values = values.toArray(); } - public SequenceLongBlockSourceOperator(List values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, List values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public SequenceLongBlockSourceOperator(List values, int maxPagePositions) { + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, List values, int maxPagePositions) { super(maxPagePositions); + this.blockFactory = blockFactory; this.values = values.stream().mapToLong(Long::longValue).toArray(); } @@ -48,7 +52,7 @@ protected Page createPage(int positionOffset, int length) { array[i] = values[positionOffset + i]; } currentPosition += length; - return new Page(BlockFactory.getNonBreakingInstance().newLongArrayVector(array, array.length).asBlock()); // TODO: just for compile + return new Page(blockFactory.newLongArrayVector(array, array.length).asBlock()); // TODO: just for compile } protected int remaining() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index f3c67f18589fa..bad8092b8a737 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; @@ -25,7 +26,7 @@ public class StringExtractOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List input = LongStream.range(0, end) .mapToObj(l -> new BytesRef("word1_" + l + " word2_" + l + " word3_" + l)) .collect(Collectors.toList()); @@ -46,8 +47,8 @@ protected Operator.OperatorFactory simple(BigArrays bigArrays) { new String[] { "test" }, dvrCtx -> new EvalOperator.ExpressionEvaluator() { @Override - public Block eval(Page page) { - return page.getBlock(0); + public Block.Ref eval(Page page) { + return new Block.Ref(page.getBlock(0), page); } @Override @@ -91,8 +92,8 @@ public void testMultivalueDissectInput() { StringExtractOperator operator = new StringExtractOperator(new String[] { "test" }, new EvalOperator.ExpressionEvaluator() { @Override - public Block eval(Page page) { - return page.getBlock(0); + public Block.Ref eval(Page page) { + return new Block.Ref(page.getBlock(0), page); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java index 78cff5897c917..9b87dbe01224a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java @@ -26,10 +26,6 @@ public class 
TupleBlockSourceOperator extends AbstractBlockSourceOperator { private final List> values; - public TupleBlockSourceOperator(Stream> values) { - this(BlockFactory.getNonBreakingInstance(), values, DEFAULT_MAX_PAGE_POSITIONS); - } - public TupleBlockSourceOperator(BlockFactory blockFactory, Stream> values) { this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } @@ -40,14 +36,14 @@ public TupleBlockSourceOperator(BlockFactory blockFactory, Stream> values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public TupleBlockSourceOperator(BlockFactory blockFactory, List> values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public TupleBlockSourceOperator(List> values, int maxPagePositions) { + public TupleBlockSourceOperator(BlockFactory blockFactory, List> values, int maxPagePositions) { super(maxPagePositions); + this.blockFactory = blockFactory; this.values = values; - blockFactory = BlockFactory.getNonBreakingInstance(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java index d79fde19f5487..9c4358e5d9ee0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.DocVector; @@ -142,7 +143,13 @@ public void testNotInKey() { ValueExtractor.extractorFor(testCase.type, testCase.encoder.toUnsortable(), false, value).writeValue(valuesBuilder, 0); 
assertThat(valuesBuilder.length(), greaterThan(0)); - ResultBuilder result = ResultBuilder.resultBuilderFor(testCase.type, testCase.encoder.toUnsortable(), false, 1); + ResultBuilder result = ResultBuilder.resultBuilderFor( + BlockFactory.getNonBreakingInstance(), + testCase.type, + testCase.encoder.toUnsortable(), + false, + 1 + ); BytesRef values = valuesBuilder.bytesRefView(); result.decodeValue(values); assertThat(values.length, equalTo(0)); @@ -163,7 +170,13 @@ public void testInKey() { ValueExtractor.extractorFor(testCase.type, testCase.encoder.toUnsortable(), true, value).writeValue(valuesBuilder, 0); assertThat(valuesBuilder.length(), greaterThan(0)); - ResultBuilder result = ResultBuilder.resultBuilderFor(testCase.type, testCase.encoder.toUnsortable(), true, 1); + ResultBuilder result = ResultBuilder.resultBuilderFor( + BlockFactory.getNonBreakingInstance(), + testCase.type, + testCase.encoder.toUnsortable(), + true, + 1 + ); BytesRef keys = keysBuilder.bytesRefView(); if (testCase.type == ElementType.NULL) { assertThat(keys.length, equalTo(1)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 7491ffde6766e..97f3fe00a4601 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -37,16 +37,20 @@ import org.elasticsearch.compute.operator.TupleBlockSourceOperator; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xpack.versionfield.Version; +import java.lang.reflect.Field; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; +import 
java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; @@ -143,8 +147,12 @@ protected String expectedToStringOfSimple() { } @Override - protected SourceOperator simpleInput(int size) { - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> ESTestCase.randomLong()), between(1, size * 2)); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator( + blockFactory, + LongStream.range(0, size).map(l -> ESTestCase.randomLong()), + between(1, size * 2) + ); } @Override @@ -180,26 +188,48 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { } public void testRamBytesUsed() { + RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() { + @Override + public long accumulateObject(Object o, long shallowSize, Map fieldValues, Collection queue) { + if (o instanceof ElementType) { + return 0; // shared + } + if (o instanceof TopNEncoder) { + return 0; // shared + } + if (o instanceof CircuitBreaker) { + return 0; // shared + } + if (o instanceof BlockFactory) { + return 0; // shard + } + return super.accumulateObject(o, shallowSize, fieldValues, queue); + } + }; int topCount = 10_000; // We under-count by a few bytes because of the lists. In that end that's fine, but we need to account for it here. 
- long underCount = 100; - TopNOperator op = new TopNOperator.TopNOperatorFactory( - topCount, - List.of(LONG), - List.of(DEFAULT_UNSORTABLE), - List.of(new TopNOperator.SortOrder(0, true, false)), - pageSize - ).get(driverContext()); - long actualEmpty = RamUsageTester.ramUsed(op) - RamUsageTester.ramUsed(LONG) - RamUsageTester.ramUsed(DEFAULT_UNSORTABLE) - - RamUsageTester.ramUsed(op.breaker()); - assertThat(op.ramBytesUsed(), both(greaterThan(actualEmpty - underCount)).and(lessThan(actualEmpty))); - // But when we fill it then we're quite close - for (Page p : CannedSourceOperator.collectPages(simpleInput(topCount))) { - op.addInput(p); + long underCount = 200; + DriverContext context = driverContext(); + try ( + TopNOperator op = new TopNOperator.TopNOperatorFactory( + topCount, + List.of(LONG), + List.of(DEFAULT_UNSORTABLE), + List.of(new TopNOperator.SortOrder(0, true, false)), + pageSize + ).get(context) + ) { + long actualEmpty = RamUsageTester.ramUsed(op, acc); + assertThat(op.ramBytesUsed(), both(greaterThan(actualEmpty - underCount)).and(lessThan(actualEmpty))); + // But when we fill it then we're quite close + for (Page p : CannedSourceOperator.collectPages(simpleInput(context.blockFactory(), topCount))) { + op.addInput(p); + } + long actualFull = RamUsageTester.ramUsed(op, acc); + assertThat(op.ramBytesUsed(), both(greaterThan(actualFull - underCount)).and(lessThan(actualFull))); + + // TODO empty it again and check. 
} - long actualFull = RamUsageTester.ramUsed(op) - RamUsageTester.ramUsed(LONG) - RamUsageTester.ramUsed(DEFAULT_UNSORTABLE) - - RamUsageTester.ramUsed(op.breaker()); - assertThat(op.ramBytesUsed(), both(greaterThan(actualFull - underCount)).and(lessThan(actualFull))); } public void testRandomTopN() { @@ -471,6 +501,7 @@ public void testCollectAllValues() { new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( + blockFactory, nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, elementTypes, @@ -559,6 +590,7 @@ public void testCollectAllValues_RandomMultiValues() { new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( + blockFactory, nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, elementTypes, @@ -590,9 +622,10 @@ private List> topNTwoColumns( try ( Driver driver = new Driver( driverContext, - new TupleBlockSourceOperator(inputValues, randomIntBetween(1, 1000)), + new TupleBlockSourceOperator(driverContext.blockFactory(), inputValues, randomIntBetween(1, 1000)), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), limit, elementTypes, @@ -607,6 +640,7 @@ private List> topNTwoColumns( for (int i = 0; i < block1.getPositionCount(); i++) { outputValues.add(tuple(block1.isNull(i) ? null : block1.getLong(i), block2.isNull(i) ? null : block2.getLong(i))); } + page.releaseBlocks(); }), () -> {} ) @@ -848,6 +882,7 @@ private void assertSortingOnMV( TopNEncoder encoder, TopNOperator.SortOrder... 
sortOrders ) { + DriverContext driverContext = driverContext(); Block block = TestBlockBuilder.blockFromValues(values, blockType); assert block.mvOrdering() == Block.MvOrdering.UNORDERED : "Blocks created for this test must have unordered multi-values"; Page page = new Page(block); @@ -856,10 +891,11 @@ private void assertSortingOnMV( int topCount = randomIntBetween(1, values.size()); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(page).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, List.of(blockType), @@ -878,6 +914,7 @@ private void assertSortingOnMV( } public void testRandomMultiValuesTopN() { + DriverContext driverContext = driverContext(); int rows = randomIntBetween(50, 100); int topCount = randomIntBetween(1, rows); int blocksCount = randomIntBetween(20, 30); @@ -969,8 +1006,9 @@ public void testRandomMultiValuesTopN() { } List>> actualValues = new ArrayList<>(); - List results = this.drive( + List results = drive( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, elementTypes, @@ -982,6 +1020,7 @@ public void testRandomMultiValuesTopN() { ); for (Page p : results) { readAsRows(actualValues, p); + p.releaseBlocks(); } List>> topNExpectedValues = expectedValues.stream() @@ -1003,13 +1042,15 @@ public void testIPSortingSingleValue() throws UnknownHostException { append(builder, new BytesRef(InetAddressPoint.encode(InetAddress.getByName(ip)))); } + DriverContext driverContext = driverContext(); List> actual = new ArrayList<>(); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), ips.size(), List.of(BYTES_REF), @@ -1075,7 +1116,7 
@@ public void testIPSortingUnorderedMultiValues() throws UnknownHostException { public void testIPSortingOrderedMultiValues() throws UnknownHostException { List> ips = new ArrayList<>(); - ips.add(List.of("123.4.245.23", "123.4.245.23")); + ips.add(List.of("123.4.245.23", "123.4.245.24")); ips.add(null); ips.add(List.of("104.30.244.2", "127.0.0.1")); ips.add(null); @@ -1092,17 +1133,17 @@ public void testIPSortingOrderedMultiValues() throws UnknownHostException { expectedDecodedIps.add(List.of("104.30.244.2", "127.0.0.1")); expectedDecodedIps.add(List.of("104.30.244.2", "124.255.255.255")); expectedDecodedIps.add(List.of("104.244.4.1")); - expectedDecodedIps.add(List.of("123.4.245.23", "123.4.245.23")); + expectedDecodedIps.add(List.of("123.4.245.23", "123.4.245.24")); } else { expectedDecodedIps.add(List.of("1.198.3.93", "2.3.4.5", "255.123.123.0")); expectedDecodedIps.add(List.of("104.30.244.2", "127.0.0.1")); expectedDecodedIps.add(List.of("104.30.244.2", "124.255.255.255")); - expectedDecodedIps.add(List.of("123.4.245.23", "123.4.245.23")); + expectedDecodedIps.add(List.of("123.4.245.23", "123.4.245.24")); expectedDecodedIps.add(List.of("104.244.4.1")); expectedDecodedIps.add(List.of("1.1.1.0", "32.183.93.40")); } - assertIPSortingOnMultiValues(ips, asc, Block.MvOrdering.ASCENDING, expectedDecodedIps); + assertIPSortingOnMultiValues(ips, asc, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, expectedDecodedIps); } private void assertIPSortingOnMultiValues( @@ -1128,12 +1169,14 @@ private void assertIPSortingOnMultiValues( } List> actual = new ArrayList<>(); + DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), ips.size(), List.of(BYTES_REF), @@ -1210,12 +1253,14 @@ public void testZeroByte() { 
blocks.add(builderInt.build()); List> actual = new ArrayList<>(); + DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), 2, List.of(BYTES_REF, INT), @@ -1243,10 +1288,55 @@ public void testZeroByte() { assertThat((Integer) actual.get(1).get(1), equalTo(100)); } + public void testErrorBeforeFullyDraining() { + int maxPageSize = between(1, 100); + int topCount = maxPageSize * 4; + int docCount = topCount * 10; + List> actual = new ArrayList<>(); + DriverContext driverContext = driverContext(); + try ( + Driver driver = new Driver( + driverContext, + new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.range(0, docCount)), + List.of( + new TopNOperator( + driverContext.blockFactory(), + nonBreakingBigArrays().breakerService().getBreaker("request"), + topCount, + List.of(LONG), + List.of(DEFAULT_UNSORTABLE), + List.of(new TopNOperator.SortOrder(0, true, randomBoolean())), + maxPageSize + ) + ), + new PageConsumerOperator(p -> { + assertThat(p.getPositionCount(), equalTo(maxPageSize)); + if (actual.isEmpty()) { + readInto(actual, p); + } else { + p.releaseBlocks(); + throw new RuntimeException("boo"); + } + }), + () -> {} + ) + ) { + Exception e = expectThrows(RuntimeException.class, () -> runDriver(driver)); + assertThat(e.getMessage(), equalTo("boo")); + } + + ListMatcher values = matchesList(); + for (int i = 0; i < maxPageSize; i++) { + values = values.item((long) i); + } + assertMap(actual, matchesList().item(values)); + } + public void testCloseWithoutCompleting() { CircuitBreaker breaker = new MockBigArrays.LimitedBreaker(CircuitBreaker.REQUEST, ByteSizeValue.ofGb(1)); try ( TopNOperator op = new TopNOperator( + driverContext().blockFactory(), breaker, 2, List.of(INT), @@ 
-1257,7 +1347,11 @@ public void testCloseWithoutCompleting() { ) { op.addInput(new Page(new IntArrayVector(new int[] { 1 }, 1).asBlock())); } - assertThat(breaker.getUsed(), equalTo(0L)); + } + + @Override + protected DriverContext driverContext() { // TODO remove this when the parent uses a breaking block factory + return breakingDriverContext(); } @SuppressWarnings({ "unchecked", "rawtypes" }) diff --git a/x-pack/plugin/esql/qa/server/single-node/build.gradle b/x-pack/plugin/esql/qa/server/single-node/build.gradle index 6f913100e0fd7..d5fedad1a537b 100644 --- a/x-pack/plugin/esql/qa/server/single-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/single-node/build.gradle @@ -2,6 +2,7 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' dependencies { javaRestTestImplementation project(xpackModule('esql:qa:testFixtures')) + yamlRestTestImplementation project(xpackModule('esql:qa:server')) } restResources { diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java index 64aaf547e5468..38d58644926fe 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlIT.java @@ -11,6 +11,9 @@ import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; +import org.junit.After; +import org.junit.Before; public class EsqlClientYamlIT extends ESClientYamlSuiteTestCase { @@ -22,4 +25,10 @@ public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { public static Iterable parameters() throws Exception { return 
createParameters(); } + + @Before + @After + public void assertRequestBreakerEmpty() throws Exception { + EsqlSpecTestCase.assertRequestBreakerEmpty(); + } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml index 280a32aa10cd3..bae0e623d12a3 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml @@ -235,35 +235,36 @@ disjoint_mappings: - length: { values: 1 } - match: { values.0.0: 2 } - - do: - esql.query: - body: - query: 'from test1,test2 | sort message1, message2 | eval x = message1, y = message2 + 1 | keep message1, message2, x, y' - - match: { columns.0.name: message1 } - - match: { columns.0.type: keyword } - - match: { columns.1.name: message2 } - - match: { columns.1.type: long } - - match: { columns.2.name: x } - - match: { columns.2.type: keyword } - - match: { columns.3.name: y } - - match: { columns.3.type: long } - - length: { values: 4 } - - match: { values.0.0: foo1 } - - match: { values.0.1: null } - - match: { values.0.2: foo1 } - - match: { values.0.3: null } - - match: { values.1.0: foo2 } - - match: { values.1.1: null } - - match: { values.1.2: foo2 } - - match: { values.1.3: null } - - match: { values.2.0: null } - - match: { values.2.1: 1 } - - match: { values.2.2: null } - - match: { values.2.3: 2 } - - match: { values.3.0: null } - - match: { values.3.1: 2 } - - match: { values.3.2: null } - - match: { values.3.3: 3 } +# AwaitsFix https://github.com/elastic/elasticsearch/issues/99826 +# - do: +# esql.query: +# body: +# query: 'from test1,test2 | sort message1, message2 | eval x = message1, y = message2 + 1 | keep message1, message2, x, y' +# - match: { columns.0.name: message1 } +# 
- match: { columns.0.type: keyword } +# - match: { columns.1.name: message2 } +# - match: { columns.1.type: long } +# - match: { columns.2.name: x } +# - match: { columns.2.type: keyword } +# - match: { columns.3.name: y } +# - match: { columns.3.type: long } +# - length: { values: 4 } +# - match: { values.0.0: foo1 } +# - match: { values.0.1: null } +# - match: { values.0.2: foo1 } +# - match: { values.0.3: null } +# - match: { values.1.0: foo2 } +# - match: { values.1.1: null } +# - match: { values.1.2: foo2 } +# - match: { values.1.3: null } +# - match: { values.2.0: null } +# - match: { values.2.1: 1 } +# - match: { values.2.2: null } +# - match: { values.2.3: 2 } +# - match: { values.3.0: null } +# - match: { values.3.1: 2 } +# - match: { values.3.2: null } +# - match: { values.3.3: 3 } --- same_name_different_type: diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index e9dc848024448..776a2e732e5e9 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -8,8 +8,10 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.http.HttpEntity; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.rest.ESRestTestCase; @@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.RequestObjectBuilder; import org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; import org.elasticsearch.xpack.ql.SpecReader; +import org.junit.After; import org.junit.AfterClass; import 
org.junit.Before; @@ -25,8 +28,11 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.xpack.esql.CsvAssert.assertData; import static org.elasticsearch.xpack.esql.CsvAssert.assertMetadata; +import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP; @@ -78,6 +84,10 @@ public static void wipeTestData() throws IOException { } } + public boolean logResults() { + return false; + } + public final void test() throws Throwable { try { assumeTrue("Test " + testName + " is not enabled", isEnabled(testName)); @@ -92,21 +102,29 @@ protected final void doTest() throws Throwable { Map answer = runEsql(builder.query(testCase.query).build(), testCase.expectedWarnings); var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults); - assertNotNull(answer.get("columns")); + var metadata = answer.get("columns"); + assertNotNull(metadata); @SuppressWarnings("unchecked") - var actualColumns = (List>) answer.get("columns"); - assertMetadata(expectedColumnsWithValues, actualColumns, LOGGER); + var actualColumns = (List>) metadata; - assertNotNull(answer.get("values")); + Logger logger = logResults() ? LOGGER : null; + var values = answer.get("values"); + assertNotNull(values); @SuppressWarnings("unchecked") - List> actualValues = (List>) answer.get("values"); - assertData( - expectedColumnsWithValues, - actualValues, - testCase.ignoreOrder, - LOGGER, - value -> value == null ? 
"null" : value.toString() - ); + List> actualValues = (List>) values; + + assertResults(expectedColumnsWithValues, actualColumns, actualValues, testCase.ignoreOrder, logger); + } + + protected void assertResults( + ExpectedResults expected, + List> actualColumns, + List> actualValues, + boolean ignoreOrder, + Logger logger + ) { + assertMetadata(expected, actualColumns, logger); + assertData(expected, actualValues, testCase.ignoreOrder, logger, value -> value == null ? "null" : value.toString()); } private Throwable reworkException(Throwable th) { @@ -123,4 +141,24 @@ private Throwable reworkException(Throwable th) { protected boolean preserveClusterUponCompletion() { return true; } + + @Before + @After + public void assertRequestBreakerEmptyAfterTests() throws Exception { + assertRequestBreakerEmpty(); + } + + public static void assertRequestBreakerEmpty() throws Exception { + assertBusy(() -> { + HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats")).getEntity(); + Map stats = XContentHelper.convertToMap(XContentType.JSON.xContent(), entity.getContent(), false); + Map nodes = (Map) stats.get("nodes"); + for (Object n : nodes.values()) { + Map node = (Map) n; + Map breakers = (Map) node.get("breakers"); + Map request = (Map) breakers.get("request"); + assertMap(request, matchesMap().extraOk().entry("estimated_size_in_bytes", 0).entry("estimated_size", "0b")); + } + }); + } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 2b8bead0f86bc..14e645f37a659 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import 
org.elasticsearch.xcontent.XContentType; import org.junit.After; +import org.junit.Before; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -556,4 +557,10 @@ protected static String fromIndex() { protected boolean preserveClusterUponCompletion() { return true; } + + @Before + @After + public void assertRequestBreakerEmpty() throws Exception { + EsqlSpecTestCase.assertRequestBreakerEmpty(); + } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index b58a7770eef10..2bfe366d9f01e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -415,7 +415,7 @@ Iterator> values() { } } - static void logMetaData(List actualColumnNames, List actualColumnTypes, Logger logger) { + public static void logMetaData(List actualColumnNames, List actualColumnTypes, Logger logger) { // header StringBuilder sb = new StringBuilder(); StringBuilder column = new StringBuilder(); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index d1aa4dd811df3..866594b6d0315 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -411,3 +411,17 @@ ROW date_string = "2022-05-06" date_string:keyword | date:date 2022-05-06 | 2022-05-06T00:00:00.000Z ; + +docsReplace +//tag::replaceString[] +ROW str = "Hello World" +| EVAL str = REPLACE(str, "World", "Universe") +| KEEP str +// end::replaceString[] +; + +//tag::replaceString-result[] +str:keyword +Hello Universe +// end::replaceString-result[] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec index 3637081c3c4b6..fd87eedf57f2f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec @@ -205,7 +205,8 @@ emp_no:integer | languages:integer | gender:keyword | first_name:keyword | abc:i 10100 | 4 | F | Hironobu | 3 ; -projectFromWithStatsAfterLimit +# awaitsfix https://github.com/elastic/elasticsearch/issues/99826 +projectFromWithStatsAfterLimit-Ignore from employees | sort emp_no | keep gender, avg_worked_seconds, first_name, last_name | limit 10 | stats m = max(avg_worked_seconds) by gender; m:long | gender:keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 360ef3b0a70b8..fdee34f2affca 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -57,6 +57,7 @@ now |now() percentile |percentile(arg1, arg2) pi |pi() pow |pow(base, exponent) +replace |replace(arg1, arg2, arg3) right |right(string, length) round |round(arg1, arg2) rtrim |rtrim(arg1) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 55016a4cd2dc2..6405c082cf784 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -533,3 +533,48 @@ c:l ; + +countAllGrouped +from employees | stats c = count(*) by languages | rename languages as l | sort l DESC ; + +c:l | l:i +10 |null +21 |5 +18 |4 +17 |3 +19 |2 +15 |1 +; + +countAllAndOtherStatGrouped +from employees | stats c = count(*), min = min(emp_no) by languages | sort languages; + +c:l | min:i | languages:i +15 | 10005 | 1 +19 | 10001 | 2 +17 | 10006 | 3 +18 | 10003 | 4 
+21 | 10002 | 5 +10 | 10020 | null +; + +countAllWithEval +from employees | rename languages as l | stats min = min(salary) by l | eval x = min + 1 | stats ca = count(*), cx = count(x) by l | sort l; + +ca:l | cx:l | l:i +1 | 1 | 1 +1 | 1 | 2 +1 | 1 | 3 +1 | 1 | 4 +1 | 1 | 5 +1 | 1 | null +; + +aggsWithoutStats +from employees | stats by gender | sort gender; + +gender:keyword +F +M +null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 41d70c65cd5f9..2d1db44eea7be 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -721,6 +721,75 @@ Gateway | instances null | null ; +replaceSimple +from employees | sort emp_no | limit 1 | eval name = replace(first_name, "Geo", "foo") | keep emp_no, name; + +emp_no:integer | name:keyword +10001 | foorgi +; + +replaceText +from hosts | where host == "epsilon" | limit 1 | eval name = replace(host, "ep", "") | keep name; + +name:keyword +silon +; + +replaceComplex +from employees | where emp_no <= 10010 | eval f_l = replace(replace(substring(last_name, 1, 20), "al", "AB"), "a", "Z") | keep emp_no, last_name, f_l | sort emp_no; + +emp_no:integer | last_name:keyword | f_l:keyword +10001 | Facello | FZcello +10002 | Simmel | Simmel +10003 | Bamford | BZmford +10004 | Koblick | Koblick +10005 | Maliniak | MABiniZk +10006 | Preusig | Preusig +10007 | Zielinski | Zielinski +10008 | Kalloufi | KABloufi +10009 | Peac | PeZc +10010 | Piveteau | PiveteZu +; + +replaceComplex2 +from employees | where emp_no <= 10010 | eval f_l = substring(replace(last_name, "a", "Z"), 1, 1) | keep emp_no, last_name, f_l | sort emp_no; + +emp_no:integer | last_name:keyword | f_l:keyword +10001 | Facello | F +10002 | Simmel | S +10003 | Bamford | B +10004 | Koblick | K +10005 | Maliniak | M +10006 | Preusig | P +10007 | Zielinski | Z +10008 | 
Kalloufi | K +10009 | Peac | P +10010 | Piveteau | P +; + +replaceComplex3 +FROM employees | eval x = replace(to_string(emp_no), "(0+)", left(first_name, 1)), y = left(first_name, 1), is_match = y == substring(x, 2, 1) | keep first_name, y, x, is_match | sort is_match, x | limit 8; + +first_name:keyword| y:keyword | x:keyword |is_match:boolean +Alejandro |A |1A59 |true +Anneke |A |1A6 |true +Anoosh |A |1A62 |true +Amabile |A |1A91 |true +Arumugam |A |1A94 |true +Berni |B |1B14 |true +Bezalel |B |1B2 |true +Bojan |B |1B23 |true +; + +replaceRegex +from hosts | where host == "epsilon" | eval l1=replace(host_group, "\\s+", "") | sort l1 | keep l1, host_group; + + l1:keyword |host_group:text +Gatewayinstances|Gateway instances +Gatewayinstances|Gateway instances +null |null +; + left // tag::left[] FROM employees diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index 9f5d7be3e63e0..2238b0c086d9e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -146,7 +146,6 @@ FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | K 16002960716282089759 | 9.169021087566165E20 ; - toRadians FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec index df1fa6e67f279..08196b2d7726d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec @@ -203,7 +203,8 @@ bad 5.2.9-SNAPSHOT ; -groupByVersionCast +# AwaitsFix https://github.com/elastic/elasticsearch/issues/99826 +groupByVersionCast-Ignore FROM apps | EVAL g = 
TO_VER(CONCAT("1.", TO_STR(version))) | STATS id = MAX(id) BY g | SORT id | DROP g; id:i diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index 2bffd5c64cdaf..519938c8e3201 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.operator.exchange.ExchangeService; @@ -65,7 +64,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { public void testBreaker() { for (int i = 0; i < 5000; i++) { - IndexResponse response = client().prepareIndex("test").setId(Integer.toString(i)).setSource("foo", i, "bar", i * 2).get(); + DocWriteResponse response = client().prepareIndex("test").setId(Integer.toString(i)).setSource("foo", i, "bar", i * 2).get(); if (response.getResult() != DocWriteResponse.Result.CREATED) { fail("failure: " + response); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index f8aeee1569f2e..e8cee0407ca66 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -7,11 +7,10 @@ package org.elasticsearch.xpack.esql.action; +import 
org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.Build; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.WriteRequest; @@ -35,6 +34,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -67,6 +67,7 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.nullValue; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100127") public class EsqlActionIT extends AbstractEsqlIntegTestCase { long epoch = System.currentTimeMillis(); @@ -109,32 +110,33 @@ public void testFromStatsGroupingAvgWithAliases() { } private void testFromStatsGroupingAvgImpl(String command, String expectedGroupName, String expectedFieldName) { - EsqlQueryResponse results = run(command); - logger.info(results); - Assert.assertEquals(2, results.columns().size()); - - // assert column metadata - ColumnInfo valuesColumn = results.columns().get(0); - assertEquals(expectedFieldName, valuesColumn.name()); - assertEquals("double", valuesColumn.type()); - ColumnInfo groupColumn = results.columns().get(1); - assertEquals(expectedGroupName, groupColumn.name()); - assertEquals("long", groupColumn.type()); + try (EsqlQueryResponse results = run(command)) { + logger.info(results); + Assert.assertEquals(2, results.columns().size()); - // assert column values - List> valueValues = getValuesList(results); - assertEquals(2, valueValues.size()); - // This is loathsome, find a declarative way to assert the expected output. 
- if ((long) valueValues.get(0).get(1) == 1L) { - assertEquals(42.0, (double) valueValues.get(0).get(0), 0.0); - assertEquals(2L, (long) valueValues.get(1).get(1)); - assertEquals(44.0, (double) valueValues.get(1).get(0), 0.0); - } else if ((long) valueValues.get(0).get(1) == 2L) { - assertEquals(42.0, (double) valueValues.get(1).get(0), 0.0); - assertEquals(1L, (long) valueValues.get(1).get(1)); - assertEquals(44.0, (double) valueValues.get(0).get(0), 0.0); - } else { - fail("Unexpected group value: " + valueValues.get(0).get(0)); + // assert column metadata + ColumnInfo valuesColumn = results.columns().get(0); + assertEquals(expectedFieldName, valuesColumn.name()); + assertEquals("double", valuesColumn.type()); + ColumnInfo groupColumn = results.columns().get(1); + assertEquals(expectedGroupName, groupColumn.name()); + assertEquals("long", groupColumn.type()); + + // assert column values + List> valueValues = getValuesList(results); + assertEquals(2, valueValues.size()); + // This is loathsome, find a declarative way to assert the expected output. 
+ if ((long) valueValues.get(0).get(1) == 1L) { + assertEquals(42.0, (double) valueValues.get(0).get(0), 0.0); + assertEquals(2L, (long) valueValues.get(1).get(1)); + assertEquals(44.0, (double) valueValues.get(1).get(0), 0.0); + } else if ((long) valueValues.get(0).get(1) == 2L) { + assertEquals(42.0, (double) valueValues.get(1).get(0), 0.0); + assertEquals(1L, (long) valueValues.get(1).get(1)); + assertEquals(44.0, (double) valueValues.get(0).get(0), 0.0); + } else { + fail("Unexpected group value: " + valueValues.get(0).get(0)); + } } } @@ -211,19 +213,20 @@ public void testFromGroupingByNumericFieldWithNulls() { } } client().admin().indices().prepareRefresh("test").get(); - EsqlQueryResponse results = run("from test | stats avg(count) by data | sort data"); - logger.info(results); + try (EsqlQueryResponse results = run("from test | stats avg(count) by data | sort data")) { + logger.info(results); - assertThat(results.columns(), hasSize(2)); - assertEquals("avg(count)", results.columns().get(0).name()); - assertEquals("double", results.columns().get(0).type()); - assertEquals("data", results.columns().get(1).name()); - assertEquals("long", results.columns().get(1).type()); + assertThat(results.columns(), hasSize(2)); + assertEquals("avg(count)", results.columns().get(0).name()); + assertEquals("double", results.columns().get(0).type()); + assertEquals("data", results.columns().get(1).name()); + assertEquals("long", results.columns().get(1).type()); - record Group(Long data, Double avg) {} - List expectedGroups = List.of(new Group(1L, 42.0), new Group(2L, 44.0), new Group(99L, null), new Group(null, 12.0)); - List actualGroups = getValuesList(results).stream().map(l -> new Group((Long) l.get(1), (Double) l.get(0))).toList(); - assertThat(actualGroups, equalTo(expectedGroups)); + record Group(Long data, Double avg) {} + List expectedGroups = List.of(new Group(1L, 42.0), new Group(2L, 44.0), new Group(99L, null), new Group(null, 12.0)); + List actualGroups = 
getValuesList(results).stream().map(l -> new Group((Long) l.get(1), (Double) l.get(0))).toList(); + assertThat(actualGroups, equalTo(expectedGroups)); + } } public void testFromStatsGroupingByKeyword() { @@ -264,7 +267,7 @@ public void testFromStatsGroupingByKeywordWithNulls() { EsqlQueryResponse results = run("from test | stats avg = avg(" + field + ") by color"); logger.info(results); Assert.assertEquals(2, results.columns().size()); - Assert.assertEquals(4, getValuesList(results).size()); + Assert.assertEquals(5, getValuesList(results).size()); // assert column metadata assertEquals("avg", results.columns().get(0).name()); @@ -275,6 +278,7 @@ record Group(String color, Double avg) { } List expectedGroups = List.of( + new Group(null, 120.0), new Group("blue", 42.0), new Group("green", 44.0), new Group("red", 43.0), @@ -282,18 +286,10 @@ record Group(String color, Double avg) { ); List actualGroups = getValuesList(results).stream() .map(l -> new Group((String) l.get(1), (Double) l.get(0))) - .sorted(comparing(c -> c.color)) + .sorted(Comparator.comparing(c -> c.color, Comparator.nullsFirst(String::compareTo))) .toList(); assertThat(actualGroups, equalTo(expectedGroups)); } - for (int i = 0; i < 5; i++) { - client().prepareBulk() - .add(new DeleteRequest("test").id("no_color_" + i)) - .add(new DeleteRequest("test").id("no_count_red_" + i)) - .add(new DeleteRequest("test").id("no_count_yellow_" + i)) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); - } } public void testFromStatsMultipleAggs() { @@ -332,18 +328,19 @@ record Group(double avg, long mi, long ma, long s, long c, String color) {} } public void testFromSortWithTieBreakerLimit() { - EsqlQueryResponse results = run("from test | sort data, count desc, time | limit 5 | keep data, count, time"); - logger.info(results); - assertThat( - getValuesList(results), - contains( - List.of(1L, 44L, epoch + 2), - List.of(1L, 44L, epoch + 6), - List.of(1L, 44L, epoch + 10), - List.of(1L, 44L, epoch + 
14), - List.of(1L, 44L, epoch + 18) - ) - ); + try (EsqlQueryResponse results = run("from test | sort data, count desc, time | limit 5 | keep data, count, time")) { + logger.info(results); + assertThat( + getValuesList(results), + contains( + List.of(1L, 44L, epoch + 2), + List.of(1L, 44L, epoch + 6), + List.of(1L, 44L, epoch + 10), + List.of(1L, 44L, epoch + 14), + List.of(1L, 44L, epoch + 18) + ) + ); + } } public void testFromStatsProjectGroup() { @@ -446,7 +443,48 @@ public void testFromEvalStats() { assertEquals(0.034d, (double) getValuesList(results).get(0).get(0), 0.001d); } - public void testFromStatsThenEval() { + public void testUngroupedCountAll() { + EsqlQueryResponse results = run("from test | stats count(*)"); + logger.info(results); + Assert.assertEquals(1, results.columns().size()); + Assert.assertEquals(1, getValuesList(results).size()); + assertEquals("count(*)", results.columns().get(0).name()); + assertEquals("long", results.columns().get(0).type()); + var values = getValuesList(results).get(0); + assertEquals(1, values.size()); + assertEquals(40, (long) values.get(0)); + } + + public void testUngroupedCountAllWithFilter() { + EsqlQueryResponse results = run("from test | where data > 1 | stats count(*)"); + logger.info(results); + Assert.assertEquals(1, results.columns().size()); + Assert.assertEquals(1, getValuesList(results).size()); + assertEquals("count(*)", results.columns().get(0).name()); + assertEquals("long", results.columns().get(0).type()); + var values = getValuesList(results).get(0); + assertEquals(1, values.size()); + assertEquals(20, (long) values.get(0)); + } + + @AwaitsFix(bugUrl = "tracking down a 64b(long) memory leak") + public void testGroupedCountAllWithFilter() { + EsqlQueryResponse results = run("from test | where data > 1 | stats count(*) by data | sort data"); + logger.info(results); + Assert.assertEquals(2, results.columns().size()); + Assert.assertEquals(1, getValuesList(results).size()); + assertEquals("count(*)", 
results.columns().get(0).name()); + assertEquals("long", results.columns().get(0).type()); + assertEquals("data", results.columns().get(1).name()); + assertEquals("long", results.columns().get(1).type()); + var values = getValuesList(results).get(0); + assertEquals(2, values.size()); + assertEquals(20, (long) values.get(0)); + assertEquals(2L, (long) values.get(1)); + } + + public void testFromStatsEvalWithPragma() { + assumeTrue("pragmas only enabled on snapshot builds", Build.current().isSnapshot()); EsqlQueryResponse results = run("from test | stats avg_count = avg(count) | eval x = avg_count + 7"); logger.info(results); Assert.assertEquals(1, getValuesList(results).size()); @@ -519,11 +557,6 @@ public void testFilterWithNullAndEvalFromIndex() { assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("data", "long")))); assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("data_d", "double")))); assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("time", "long")))); - - // restore index to original pre-test state - client().prepareBulk().add(new DeleteRequest("test").id("no_count")).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); - results = run("from test"); - Assert.assertEquals(40, getValuesList(results).size()); } public void testMultiConditionalWhere() { @@ -778,10 +811,11 @@ public void testFromStatsLimit() { } public void testFromLimit() { - EsqlQueryResponse results = run("from test | keep data | limit 2"); - logger.info(results); - assertThat(results.columns(), contains(new ColumnInfo("data", "long"))); - assertThat(getValuesList(results), contains(anyOf(contains(1L), contains(2L)), anyOf(contains(1L), contains(2L)))); + try (EsqlQueryResponse results = run("from test | keep data | limit 2")) { + logger.info(results); + assertThat(results.columns(), contains(new ColumnInfo("data", "long"))); + assertThat(getValuesList(results), contains(anyOf(contains(1L), contains(2L)), anyOf(contains(1L), contains(2L)))); + } } 
public void testDropAllColumns() { @@ -919,9 +953,6 @@ public void testInWithNullValue() { } public void testTopNPushedToLucene() { - BulkRequestBuilder bulkDelete = client().prepareBulk(); - bulkDelete.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - for (int i = 5; i < 11; i++) { var yellowDocId = "yellow_" + i; var yellowNullCountDocId = "yellow_null_count_" + i; @@ -935,11 +966,6 @@ public void testTopNPushedToLucene() { if (randomBoolean()) { client().admin().indices().prepareRefresh("test").get(); } - - // build the cleanup request now, as well, not to miss anything ;-) - bulkDelete.add(new DeleteRequest("test").id(yellowDocId)) - .add(new DeleteRequest("test").id(yellowNullCountDocId)) - .add(new DeleteRequest("test").id(yellowNullDataDocId)); } client().admin().indices().prepareRefresh("test").get(); @@ -1000,27 +1026,25 @@ public void testTopNPushedToLuceneOnSortedIndex() { ); int limit = randomIntBetween(1, 5); - EsqlQueryResponse results = run("from sorted_test_index | sort time " + sortOrder + " | limit " + limit + " | keep time"); - logger.info(results); - Assert.assertEquals(1, results.columns().size()); - Assert.assertEquals(limit, getValuesList(results).size()); - - // assert column metadata - assertEquals("time", results.columns().get(0).name()); - assertEquals("long", results.columns().get(0).type()); + try (EsqlQueryResponse results = run("from sorted_test_index | sort time " + sortOrder + " | limit " + limit + " | keep time")) { + logger.info(results); + Assert.assertEquals(1, results.columns().size()); + Assert.assertEquals(limit, getValuesList(results).size()); - boolean sortedDesc = "desc".equals(sortOrder); - var expected = LongStream.range(0, 40) - .map(i -> epoch + i) - .boxed() - .sorted(sortedDesc ? 
reverseOrder() : naturalOrder()) - .limit(limit) - .toList(); - var actual = getValuesList(results).stream().map(l -> (Long) l.get(0)).toList(); - assertThat(actual, equalTo(expected)); + // assert column metadata + assertEquals("time", results.columns().get(0).name()); + assertEquals("long", results.columns().get(0).type()); - // clean-up - client().admin().indices().delete(new DeleteIndexRequest("sorted_test_index")).actionGet(); + boolean sortedDesc = "desc".equals(sortOrder); + var expected = LongStream.range(0, 40) + .map(i -> epoch + i) + .boxed() + .sorted(sortedDesc ? reverseOrder() : naturalOrder()) + .limit(limit) + .toList(); + var actual = getValuesList(results).stream().map(l -> (Long) l.get(0)).toList(); + assertThat(actual, equalTo(expected)); + } } /* diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 16f704aa8f7c3..6c4ee71f6983c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -7,13 +7,14 @@ package org.elasticsearch.xpack.esql.action; -import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.search.DocIdSetIterator; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.util.CollectionUtils; @@ -23,6 +24,7 @@ import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; +import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -50,6 +52,9 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.emptyOrNullString; @@ -63,10 +68,9 @@ * Tests that we expose a reasonable task status. */ @TestLogging( - value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.tasks.TaskCancellationService:TRACE", - reason = "These tests are failing frequently; we need logs before muting them" + value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", + reason = "These tests were failing frequently, let's learn as much as we can" ) -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99589") public class EsqlActionTaskIT extends AbstractEsqlIntegTestCase { private static int PAGE_SIZE; private static int NUM_DOCS; @@ -93,7 +97,8 @@ public void setupIndex() throws IOException { MERGE_DESCRIPTION = """ \\_ExchangeSourceOperator[] \\_AggregationOperator[mode = FINAL, aggs = sum of longs] - \\_LimitOperator[limit = 10000] + \\_ProjectOperator[projection = [0]] + \\_LimitOperator[limit = 500] \\_OutputOperator[columns = sum(pause_me)]"""; XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); @@ -107,13 +112,35 @@ public void setupIndex() throws IOException { 
mapping.endObject(); } mapping.endObject(); - client().admin().indices().prepareCreate("test").setSettings(Map.of("number_of_shards", 1)).setMapping(mapping.endObject()).get(); + client().admin() + .indices() + .prepareCreate("test") + .setSettings(Map.of("number_of_shards", 1, "number_of_replicas", 0)) + .setMapping(mapping.endObject()) + .get(); BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (int i = 0; i < NUM_DOCS; i++) { bulk.add(client().prepareIndex("test").setId(Integer.toString(i)).setSource("foo", i)); } bulk.get(); + /* + * forceMerge so we can be sure that we don't bump into tiny + * segments that finish super quickly and cause us to report strange + * statuses when we expect "starting". + */ + client().admin().indices().prepareForceMerge("test").setMaxNumSegments(1).get(); + /* + * Double super extra paranoid check that force merge worked. It's + * failed to reduce the index to a single segment and caused this test + * to fail in very difficult to debug ways. If it fails again, it'll + * trip here. Or maybe it won't! And we'll learn something. Maybe + * it's ghosts. 
+ */ + SegmentsStats stats = client().admin().indices().prepareStats("test").get().getPrimaries().getSegments(); + if (stats.getCount() != 1L) { + fail(Strings.toString(stats)); + } } public void testTaskContents() throws Exception { @@ -130,19 +157,27 @@ public void testTaskContents() throws Exception { DriverStatus status = (DriverStatus) task.status(); assertThat(status.sessionId(), not(emptyOrNullString())); for (DriverStatus.OperatorStatus o : status.activeOperators()) { + logger.info("status {}", o); if (o.operator().startsWith("LuceneSourceOperator[maxPageSize=" + PAGE_SIZE)) { LuceneSourceOperator.Status oStatus = (LuceneSourceOperator.Status) o.status(); - assertThat(oStatus.currentLeaf(), lessThanOrEqualTo(oStatus.totalLeaves())); - assertThat(oStatus.slicePosition(), greaterThanOrEqualTo(0)); - if (oStatus.sliceSize() != 0) { - assertThat(oStatus.slicePosition(), lessThanOrEqualTo(oStatus.sliceSize())); + assertThat(oStatus.processedSlices(), lessThanOrEqualTo(oStatus.totalSlices())); + assertThat(oStatus.sliceIndex(), lessThanOrEqualTo(oStatus.totalSlices())); + assertThat(oStatus.sliceMin(), greaterThanOrEqualTo(0)); + assertThat(oStatus.sliceMax(), greaterThanOrEqualTo(oStatus.sliceMin())); + if (oStatus.sliceMin() != 0 && oStatus.sliceMax() != 0) { + assertThat( + oStatus.current(), + either(both(greaterThanOrEqualTo(oStatus.sliceMin())).and(lessThanOrEqualTo(oStatus.sliceMax()))).or( + equalTo(DocIdSetIterator.NO_MORE_DOCS) + ) + ); } luceneSources++; continue; } if (o.operator().equals("ValuesSourceReaderOperator[field = pause_me]")) { ValuesSourceReaderOperator.Status oStatus = (ValuesSourceReaderOperator.Status) o.status(); - assertThat(oStatus.readersBuilt(), equalTo(Map.of("LongValuesReader", 1))); + assertMap(oStatus.readersBuilt(), matchesMap().entry("LongValuesReader", greaterThanOrEqualTo(1))); assertThat(oStatus.pagesProcessed(), greaterThanOrEqualTo(1)); valuesSourceReaders++; continue; @@ -166,40 +201,51 @@ public void testTaskContents() 
throws Exception { assertThat(exchangeSinks, greaterThanOrEqualTo(1)); assertThat(exchangeSources, equalTo(1)); } finally { - scriptPermits.release(Integer.MAX_VALUE); + scriptPermits.release(NUM_DOCS); assertThat(Iterators.flatMap(response.get().values(), i -> i).next(), equalTo((long) NUM_DOCS)); } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99582") public void testCancelRead() throws Exception { ActionFuture response = startEsql(); - List infos = getTasksStarting(); - TaskInfo running = infos.stream().filter(t -> t.description().equals(READ_DESCRIPTION)).findFirst().get(); - cancelTask(running.taskId()); - assertCancelled(response); + try { + List infos = getTasksStarting(); + TaskInfo running = infos.stream().filter(t -> t.description().equals(READ_DESCRIPTION)).findFirst().get(); + cancelTask(running.taskId()); + assertCancelled(response); + } finally { + scriptPermits.release(NUM_DOCS); + } } public void testCancelMerge() throws Exception { ActionFuture response = startEsql(); - List infos = getTasksStarting(); - TaskInfo running = infos.stream().filter(t -> t.description().equals(MERGE_DESCRIPTION)).findFirst().get(); - cancelTask(running.taskId()); - assertCancelled(response); + try { + List infos = getTasksStarting(); + TaskInfo running = infos.stream().filter(t -> t.description().equals(MERGE_DESCRIPTION)).findFirst().get(); + cancelTask(running.taskId()); + assertCancelled(response); + } finally { + scriptPermits.release(NUM_DOCS); + } } public void testCancelEsqlTask() throws Exception { ActionFuture response = startEsql(); - getTasksStarting(); - List tasks = client().admin() - .cluster() - .prepareListTasks() - .setActions(EsqlQueryAction.NAME) - .setDetailed(true) - .get() - .getTasks(); - cancelTask(tasks.get(0).taskId()); - assertCancelled(response); + try { + getTasksStarting(); + List tasks = client().admin() + .cluster() + .prepareListTasks() + .setActions(EsqlQueryAction.NAME) + .setDetailed(true) + .get() + 
.getTasks(); + cancelTask(tasks.get(0).taskId()); + assertCancelled(response); + } finally { + scriptPermits.release(NUM_DOCS); + } } private ActionFuture startEsql() { @@ -225,7 +271,7 @@ private void cancelTask(TaskId taskId) { request.setWaitForCompletion(false); LOGGER.debug("--> cancelling task [{}] without waiting for completion", taskId); client().admin().cluster().execute(CancelTasksAction.INSTANCE, request).actionGet(); - scriptPermits.release(Integer.MAX_VALUE / 2); + scriptPermits.release(NUM_DOCS); request = new CancelTasksRequest().setTargetTaskId(taskId).setReason("test cancel"); request.setWaitForCompletion(true); LOGGER.debug("--> cancelling task [{}] with waiting for completion", taskId); @@ -233,8 +279,10 @@ private void cancelTask(TaskId taskId) { } /** - * Fetches tasks until it finds all of them are "starting". - */ + * Fetches tasks until it finds all of them are "starting" or "async". + * The "async" part is because the coordinating task almost immediately goes async + * because there isn't any data for it to process. + */ private List getTasksStarting() throws Exception { List foundTasks = new ArrayList<>(); assertBusy(() -> { @@ -248,10 +296,9 @@ private List getTasksStarting() throws Exception { assertThat(tasks, hasSize(equalTo(2))); for (TaskInfo task : tasks) { assertThat(task.action(), equalTo(DriverTaskRunner.ACTION_NAME)); - logger.info("{}", task.description()); - assertThat(task.description(), either(equalTo(READ_DESCRIPTION)).or(equalTo(MERGE_DESCRIPTION))); DriverStatus status = (DriverStatus) task.status(); - logger.info("{}", status.status()); + logger.info("task {} {}", task.description(), status); + assertThat(task.description(), either(equalTo(READ_DESCRIPTION)).or(equalTo(MERGE_DESCRIPTION))); /* * Accept tasks that are either starting or have gone * immediately async. 
The coordinating task is likely diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java index fa5a1617e9d61..f9d97cbd910e0 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java @@ -232,7 +232,7 @@ public void testMultipleMatches() { static DriverContext driverContext() { return new DriverContext( new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - BlockFactory.getGlobalInstance() + BlockFactory.getNonBreakingInstance() ); } } diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 8f07a8a5dcdea..044e920744375 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -76,8 +76,12 @@ operatorExpression primaryExpression : constant #constantDefault | qualifiedName #dereference + | functionExpression #function | LP booleanExpression RP #parenthesizedExpression - | identifier LP (booleanExpression (COMMA booleanExpression)*)? RP #functionExpression + ; + +functionExpression + : identifier LP (ASTERISK | (booleanExpression (COMMA booleanExpression)*))? 
RP ; rowCommand diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java index da2d698190fd7..a5eec9fbdc147 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java @@ -33,26 +33,28 @@ public EqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BooleanBlock lhsBlock = (BooleanBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BooleanBlock rhsBlock = (BooleanBlock) rhsUncastBlock; - BooleanVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - BooleanVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BooleanBlock lhsBlock = (BooleanBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BooleanBlock rhsBlock = (BooleanBlock) rhsRef.block(); + BooleanVector lhsVector = lhsBlock.asVector(); + if 
(lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + BooleanVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, BooleanBlock lhsBlock, BooleanBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java index 30e12b2a64b15..0a0f92b7c3317 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java @@ -35,26 +35,28 @@ public EqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock = (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page 
page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java index 945bc1c6f96a7..7a4617def143f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java @@ -35,26 +35,28 @@ public EqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block 
rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java index 826c78814e31c..e2714aff73dca 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java @@ -36,26 +36,28 @@ public EqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock lhsBlock = (BytesRefBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock rhsBlock = (BytesRefBlock) rhsUncastBlock; - BytesRefVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - BytesRefVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); + BytesRefVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + BytesRefVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, 
BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java index 7065a01895771..6bf6bd0dddd88 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java @@ -35,26 +35,28 @@ public EqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if 
(lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java index cc91e93a661fb..96ec932db9e7d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java @@ -35,26 +35,28 @@ public GreaterThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock = (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public 
Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java index 6ace6ae8d24dc..b46028e853444 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java @@ -35,26 +35,28 @@ public GreaterThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = 
(IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java index 6d0e4f39f0e55..cdd14ce204efc 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java @@ -36,26 +36,28 @@ public GreaterThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock lhsBlock = (BytesRefBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock rhsBlock = (BytesRefBlock) rhsUncastBlock; - BytesRefVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - BytesRefVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); + BytesRefVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + BytesRefVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return 
Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java index 68a406f237bf7..3bb659ace811b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java @@ -35,26 +35,28 @@ public GreaterThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) 
{ + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java index 8077ad440b41c..1e3e5a701c394 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java @@ -35,26 +35,28 @@ public GreaterThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock = (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector 
lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java index ee3be0d1def76..fab7c0e89f243 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java @@ 
-35,26 +35,28 @@ public GreaterThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java index 6f1b37411ccf9..d7409926ab268 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java @@ -36,26 +36,28 @@ public GreaterThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock lhsBlock = (BytesRefBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock rhsBlock = (BytesRefBlock) rhsUncastBlock; - BytesRefVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - BytesRefVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock rhsBlock = (BytesRefBlock) 
rhsRef.block(); + BytesRefVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + BytesRefVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java index d64ffdb8392f7..fa778b35eb3c2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java @@ -35,26 +35,28 @@ public GreaterThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if 
(rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java index 1e1cabaccfbfe..4e9857d686a4d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java @@ -35,26 +35,28 @@ public LessThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return 
Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock = (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java index 2505da4e6aa63..83bbee227e675 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java @@ -35,26 +35,28 @@ public LessThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return 
eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java index 9a9dd94411590..95c12fab29751 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java @@ -36,26 +36,28 @@ public LessThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock lhsBlock = (BytesRefBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock rhsBlock = (BytesRefBlock) rhsUncastBlock; - BytesRefVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - BytesRefVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return 
Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); + BytesRefVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + BytesRefVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java index aa5dcc8ef6ff8..f679aba60ae0e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java @@ -35,26 +35,28 @@ public LessThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), 
lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java index 9460ffe577896..88d113de4c0fe 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java @@ -35,26 +35,28 @@ public LessThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block 
eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock = (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java index 1c72bb13f7831..5c6cb60a8ac54 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java @@ -35,26 +35,28 @@ public LessThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + 
return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java index 20fc5d6420da4..2d3c7fcc98dab 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java @@ -36,26 +36,28 @@ public LessThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock lhsBlock = (BytesRefBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock rhsBlock = (BytesRefBlock) rhsUncastBlock; - BytesRefVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - BytesRefVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return 
Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); + BytesRefVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + BytesRefVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java index 35b54c325ae3f..4de5dec43a4e9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java @@ -35,26 +35,28 @@ public LessThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if 
(rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java index d795a3ea31476..d5a5b5d067d59 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java @@ -33,26 +33,28 @@ public NotEqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BooleanBlock lhsBlock = (BooleanBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BooleanBlock rhsBlock = (BooleanBlock) rhsUncastBlock; - BooleanVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - BooleanVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BooleanBlock lhsBlock = (BooleanBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BooleanBlock rhsBlock = (BooleanBlock) rhsRef.block(); + BooleanVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + BooleanVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, 
BooleanBlock lhsBlock, BooleanBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java index e3bd7dcb87de1..b2d4d7dce9a78 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java @@ -35,26 +35,28 @@ public NotEqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock = (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + 
DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java index 94e297c6f5e7e..6a9581ba2f8f3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -35,26 +35,28 @@ public NotEqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); 
+ public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index 820ec32dc944c..741b504a32a0a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -36,26 +36,28 @@ public NotEqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock lhsBlock = 
(BytesRefBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock rhsBlock = (BytesRefBlock) rhsUncastBlock; - BytesRefVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - BytesRefVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); + BytesRefVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + BytesRefVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java index 563f1e4c24b57..bbc31838a105f 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -35,26 +35,28 @@ public NotEqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return 
eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java index dc061192a38c7..df284b103372b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java @@ -29,17 +29,18 @@ public NotEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverCont } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BooleanBlock vBlock = (BooleanBlock) vUncastBlock; - BooleanVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if (vRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BooleanBlock vBlock = (BooleanBlock) vRef.block(); + BooleanVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), vVector).asBlock()); } - return eval(page.getPositionCount(), vVector).asBlock(); } public BooleanBlock eval(int positionCount, BooleanBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java index 87916f3fb38a5..02e8831bf16aa 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java @@ -37,17 +37,18 @@ public RegexMatchEvaluator(EvalOperator.ExpressionEvaluator input, CharacterRunA } @Override - public Block eval(Page page) { - Block inputUncastBlock = input.eval(page); - if (inputUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock inputBlock = (BytesRefBlock) inputUncastBlock; - BytesRefVector inputVector = inputBlock.asVector(); - if (inputVector == null) { - return eval(page.getPositionCount(), inputBlock); + public Block.Ref eval(Page page) { + try (Block.Ref inputRef = input.eval(page)) { + if (inputRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock inputBlock = (BytesRefBlock) inputRef.block(); + BytesRefVector inputVector = inputBlock.asVector(); + if (inputVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), inputBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), inputVector).asBlock()); } - return eval(page.getPositionCount(), inputVector).asBlock(); } public BooleanBlock eval(int positionCount, BytesRefBlock inputBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index a3c3328ec0f95..a4e39a7bbcc35 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -31,23 +32,27 @@ public GreatestBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values, } @Override - public Block eval(Page page) { - BooleanBlock[] valuesBlocks = new BooleanBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + BooleanBlock[] valuesBlocks = new BooleanBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (BooleanBlock) block; } - valuesBlocks[i] = (BooleanBlock) block; - } - BooleanVector[] valuesVectors = new BooleanVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + BooleanVector[] valuesVectors = new BooleanVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return 
Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java index 0148253c97711..d6b9a12427411 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -32,23 +33,27 @@ public GreatestBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values, } @Override - public Block eval(Page page) { - BytesRefBlock[] valuesBlocks = new BytesRefBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + BytesRefBlock[] valuesBlocks = new BytesRefBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return 
Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (BytesRefBlock) block; } - valuesBlocks[i] = (BytesRefBlock) block; - } - BytesRefVector[] valuesVectors = new BytesRefVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + BytesRefVector[] valuesVectors = new BytesRefVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index 49e8aa3e5cdae..d326a976fa15c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -31,23 +32,27 @@ public GreatestDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values, } @Override - public Block eval(Page page) { - DoubleBlock[] valuesBlocks 
= new DoubleBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + DoubleBlock[] valuesBlocks = new DoubleBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (DoubleBlock) block; } - valuesBlocks[i] = (DoubleBlock) block; - } - DoubleVector[] valuesVectors = new DoubleVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + DoubleVector[] valuesVectors = new DoubleVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index abd710005977e..d90ad799192b3 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -31,23 +32,27 @@ public GreatestIntEvaluator(EvalOperator.ExpressionEvaluator[] values, } @Override - public Block eval(Page page) { - IntBlock[] valuesBlocks = new IntBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + IntBlock[] valuesBlocks = new IntBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (IntBlock) block; } - valuesBlocks[i] = (IntBlock) block; - } - IntVector[] valuesVectors = new IntVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + IntVector[] valuesVectors = new IntVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return 
Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index af1fb0b99b60e..7519ae18d4bc7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -31,23 +32,27 @@ public GreatestLongEvaluator(EvalOperator.ExpressionEvaluator[] values, } @Override - public Block eval(Page page) { - LongBlock[] valuesBlocks = new LongBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + LongBlock[] valuesBlocks = new LongBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (LongBlock) block; } - valuesBlocks[i] 
= (LongBlock) block; - } - LongVector[] valuesVectors = new LongVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + LongVector[] valuesVectors = new LongVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java index 98c9a8d7b2fe9..3a820c7096ee2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -31,23 +32,27 @@ public LeastBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values, } @Override - public Block eval(Page page) { - BooleanBlock[] valuesBlocks = new BooleanBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return 
Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + BooleanBlock[] valuesBlocks = new BooleanBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (BooleanBlock) block; } - valuesBlocks[i] = (BooleanBlock) block; - } - BooleanVector[] valuesVectors = new BooleanVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + BooleanVector[] valuesVectors = new BooleanVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java index a05eb2cf41c69..6991344e9a307 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -32,23 +33,27 @@ public LeastBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values, } @Override - public Block eval(Page page) { - BytesRefBlock[] valuesBlocks = new BytesRefBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + BytesRefBlock[] valuesBlocks = new BytesRefBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (BytesRefBlock) block; } - valuesBlocks[i] = (BytesRefBlock) block; - } - BytesRefVector[] valuesVectors = new BytesRefVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + BytesRefVector[] valuesVectors = new BytesRefVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return 
eval(page.getPositionCount(), valuesVectors).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java index 628d3071d0d69..fbcc12231a6b2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -31,23 +32,27 @@ public LeastDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values, } @Override - public Block eval(Page page) { - DoubleBlock[] valuesBlocks = new DoubleBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + DoubleBlock[] valuesBlocks = new DoubleBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (DoubleBlock) block; } - valuesBlocks[i] = (DoubleBlock) block; - } - DoubleVector[] valuesVectors = new 
DoubleVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + DoubleVector[] valuesVectors = new DoubleVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java index d83403e5d5dc3..47abde5a2be56 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -30,23 +31,27 @@ public LeastIntEvaluator(EvalOperator.ExpressionEvaluator[] values, DriverContex } @Override - public Block eval(Page page) { - IntBlock[] valuesBlocks = new IntBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref 
eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + IntBlock[] valuesBlocks = new IntBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (IntBlock) block; } - valuesBlocks[i] = (IntBlock) block; - } - IntVector[] valuesVectors = new IntVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + IntVector[] valuesVectors = new IntVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java index 18d2d8994a106..c8b0c0f39dce2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; 
import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -31,23 +32,27 @@ public LeastLongEvaluator(EvalOperator.ExpressionEvaluator[] values, } @Override - public Block eval(Page page) { - LongBlock[] valuesBlocks = new LongBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + LongBlock[] valuesBlocks = new LongBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (LongBlock) block; } - valuesBlocks[i] = (LongBlock) block; - } - LongVector[] valuesVectors = new LongVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + LongVector[] valuesVectors = new LongVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java index dba115f4f7c29..0b8b444a2b6a3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.ConstantBooleanVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class ToBooleanFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendBoolean(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(DoubleVector container, int index) { @@ -72,7 +66,7 @@ private static boolean evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java index d20179fd7baed..1295956645a6f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.ConstantBooleanVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToBooleanFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendBoolean(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(IntVector container, int index) { @@ -72,7 +66,7 @@ private static boolean evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java index 7ab2d656a59cb..be01f122f9a8f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.ConstantBooleanVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToBooleanFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendBoolean(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static boolean evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java index d70d0365aaf4d..7b83995bf0933 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantBooleanVector; import 
org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToBooleanFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendBoolean(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static boolean evalValue(BytesRefVector container, int index, BytesRef s public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java index d2cf4b41770ce..4a8aebe9cd8ab 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayBlock; -import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.ConstantBooleanVector; import org.elasticsearch.compute.data.LongBlock; import 
org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToBooleanFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToBooleanFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToBooleanFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBooleanVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBooleanBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - boolean[] values = new boolean[positionCount]; + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendBoolean(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new BooleanArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BooleanArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static boolean evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static boolean evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java index 98310bb390392..ca237c1dcc4a7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import 
org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToDatetimeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDatetimeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDatetimeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendLong(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static long evalValue(BytesRefVector container, int index, BytesRef scra public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java index a168d93e73ba3..27509a4a18e56 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java @@ -6,14 +6,11 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -22,8 +19,12 @@ * This class is generated. Do not edit it. */ public final class ToDegreesEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDegreesEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDegreesEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -37,29 +38,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(DoubleVector container, int index) { @@ -71,7 +65,7 @@ private static double evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java index d2b16e4b722cb..a6ab12763ddc2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToDoubleFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(BooleanVector container, int index) { @@ -72,7 +66,7 @@ private static double evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java index 53e8edac3c5b3..5889cf151f0fa 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToDoubleFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(IntVector container, int index) { @@ -72,7 +66,7 @@ private static double evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java index 9be5f1f2456b1..ff1c81f3f544f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToDoubleFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static double evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java index 653034f0c3bc9..197e5e5f2db36 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import 
org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToDoubleFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendDouble(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static double evalValue(BytesRefVector container, int index, BytesRef sc public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java index 54cc374c758fb..018517ae61d36 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import 
org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToDoubleFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToDoubleFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToDoubleFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static double evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java index 76d5c58961970..b62fa771e492c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java @@ -6,17 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import 
org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToIPFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIPFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIPFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BytesRefVector container, int index, BytesRef public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java index 49f79cd0bcd3e..9529769a02200 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ConstantIntVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import 
org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToIntegerFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendInt(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(BooleanVector container, int index) { @@ -72,7 +66,7 @@ private static int evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java index e1b0db72ad7d9..7af8bdbf083ef 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantIntVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToIntegerFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendInt(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(DoubleVector container, int index) { @@ -72,7 +66,7 @@ private static int evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java index 9a1394b9c02cf..a84367ab27a30 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantIntVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; 
import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToIntegerFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendInt(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static int evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java index 180e64f97e63b..bd7085764e341 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantIntVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToIntegerFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendInt(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static int evalValue(BytesRefVector container, int index, BytesRef scrat public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java index 698db22c0ecc6..2312f94fec83e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantIntVector; -import org.elasticsearch.compute.data.IntArrayBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import 
org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToIntegerFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToIntegerFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToIntegerFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantIntVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - int[] values = new int[positionCount]; + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendInt(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new IntArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new IntArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static int evalValue(LongVector container, int index) { @@ -72,7 +66,7 @@ private static int evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java index bf76fb0eb8a59..48e3e45d42f46 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; 
import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BooleanVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java index 116be245e3191..14ec5e41a04e5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(DoubleVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java index 02d043c641cb0..f0eae8bfccc44 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(IntVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java index cc825664cc331..8af9a14fd81be 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendLong(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static long evalValue(BytesRefVector container, int index, BytesRef scra public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java index 02bef2f9f9c2d..569df205855d3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java @@ -6,14 +6,11 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -22,8 +19,12 @@ * This class is generated. Do not edit it. */ public final class ToLongFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToLongFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToLongFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -37,29 +38,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(LongVector container, int index) { @@ -71,7 +65,7 @@ private static long evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java index 33ae94093dd85..6aa373e69b7cd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java @@ -6,14 +6,11 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantDoubleVector; -import org.elasticsearch.compute.data.DoubleArrayBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import 
org.elasticsearch.xpack.ql.tree.Source; @@ -22,8 +19,12 @@ * This class is generated. Do not edit it. */ public final class ToRadiansEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToRadiansEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToRadiansEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -37,29 +38,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantDoubleVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - double[] values = new double[positionCount]; + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendDouble(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new DoubleArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new DoubleArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static double evalValue(DoubleVector container, int index) { @@ -71,7 +65,7 @@ private static double evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java index 876344b1c35bc..8507395c6153a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import 
org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BooleanVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java index 8aa5148b21de4..7d6bf029fe80b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import 
org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromDatetimeEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromDatetimeEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromDatetimeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(LongVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java index 8c7994a3c0a68..e0aa134286723 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import 
org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(DoubleVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java index 4e0249939cc91..7ef6c3df27025 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java @@ -6,17 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import 
org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromIPEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromIPEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromIPEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BytesRefVector container, int index, BytesRef public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java index d076b38f49b91..abe206d5a5152 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import 
org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(IntVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java index 90448cb992cd2..be6c2648f9eb4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import 
org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(LongVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java index 91e31c9626b5e..9ba24301875d2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java @@ -6,18 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; -import 
org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromUnsignedLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p)); + builder.appendBytesRef(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(LongVector container, int index) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java index 281b881bd6141..69d2e0e106fa0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java @@ -6,17 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import 
org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToStringFromVersionEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToStringFromVersionEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToStringFromVersionEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BytesRefVector container, int index, BytesRef public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java index ec8b16568c380..541e5b8c7af11 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import 
org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToUnsignedLongFromBooleanEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromBooleanEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BooleanVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(BooleanVector container, int index) { public Block evalBlock(Block b) { BooleanBlock block = (BooleanBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java index 2ada365ce848e..89c896ccf1f43 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToUnsignedLongFromDoubleEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromDoubleEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(DoubleVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(DoubleVector container, int index) { public Block evalBlock(Block b) { DoubleBlock block = (DoubleBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java index 9acad2f9481a6..3c78c24ea7b01 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java @@ -6,15 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToUnsignedLongFromIntEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromIntEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -38,29 +39,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(IntVector container, int index) { @@ -72,7 +66,7 @@ private static long evalValue(IntVector container, int index) { public Block evalBlock(Block b) { IntBlock block = (IntBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java index 0cb7da2ed230f..0c0cb9ebfb525 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java @@ -6,14 +6,11 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -22,8 +19,12 @@ * This class is generated. Do not edit it. */ public final class ToUnsignedLongFromLongEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromLongEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -37,29 +38,22 @@ public Block evalVector(Vector v) { int positionCount = v.getPositionCount(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p); + builder.appendLong(evalValue(vector, p)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(LongVector container, int index) { @@ -71,7 +65,7 @@ private static long evalValue(LongVector container, int index) { public Block evalBlock(Block b) { LongBlock block = (LongBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); int start = block.getFirstValueIndex(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java index 3297fcffbe73b..38056be01487c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java @@ -6,16 +6,13 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantLongVector; -import org.elasticsearch.compute.data.LongArrayBlock; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import 
org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,8 +21,12 @@ * This class is generated. Do not edit it. */ public final class ToUnsignedLongFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToUnsignedLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToUnsignedLongFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -40,29 +41,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantLongVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - long[] values = new long[positionCount]; + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values[p] = evalValue(vector, p, scratchPad); + builder.appendLong(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); + builder.appendNull(); } } - return nullsMask == null - ? 
new LongArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new LongArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -74,7 +68,7 @@ private static long evalValue(BytesRefVector container, int index, BytesRef scra public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java index 5f6b62e16de52..bead25f13dd6a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java @@ -6,17 +6,12 @@ import java.lang.Override; import java.lang.String; -import java.util.BitSet; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefArrayBlock; -import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import 
org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ConstantBytesRefVector; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,8 +20,12 @@ * This class is generated. Do not edit it. */ public final class ToVersionFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { - public ToVersionFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source) { + private final DriverContext driverContext; + + public ToVersionFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { super(field, source); + this.driverContext = driverContext; } @Override @@ -41,30 +40,22 @@ public Block evalVector(Vector v) { BytesRef scratchPad = new BytesRef(); if (vector.isConstant()) { try { - return new ConstantBytesRefVector(evalValue(vector, 0, scratchPad), positionCount).asBlock(); + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); } catch (Exception e) { registerException(e); - return Block.constantNullBlock(positionCount); + return Block.constantNullBlock(positionCount, driverContext.blockFactory()); } } - BitSet nullsMask = null; - BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); for (int p = 0; p < positionCount; p++) { try { - values.append(evalValue(vector, p, scratchPad)); + builder.appendBytesRef(evalValue(vector, p, scratchPad)); } catch (Exception e) { registerException(e); - if (nullsMask == null) { - nullsMask = new BitSet(positionCount); - } - nullsMask.set(p); - values.append(BytesRefBlock.NULL_VALUE); + builder.appendNull(); } } - return nullsMask == null - ? 
new BytesRefArrayVector(values, positionCount).asBlock() - // UNORDERED, since whatever ordering there is, it isn't necessarily preserved - : new BytesRefArrayBlock(values, positionCount, null, nullsMask, Block.MvOrdering.UNORDERED); + return builder.build(); } private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { @@ -76,7 +67,7 @@ private static BytesRef evalValue(BytesRefVector container, int index, BytesRef public Block evalBlock(Block b) { BytesRefBlock block = (BytesRefBlock) b; int positionCount = block.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); BytesRef scratchPad = new BytesRef(); for (int p = 0; p < positionCount; p++) { int valueCount = block.getValueCount(p); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java index a0f4e6215f0f8..83481fa5483b3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java @@ -38,17 +38,18 @@ public DateExtractConstantEvaluator(EvalOperator.ExpressionEvaluator value, } @Override - public Block eval(Page page) { - Block valueUncastBlock = value.eval(page); - if (valueUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valueBlock = (LongBlock) valueUncastBlock; - LongVector valueVector = valueBlock.asVector(); - if (valueVector == null) { - return eval(page.getPositionCount(), valueBlock); + public 
Block.Ref eval(Page page) { + try (Block.Ref valueRef = value.eval(page)) { + if (valueRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valueBlock = (LongBlock) valueRef.block(); + LongVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valueBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valueVector).asBlock()); } - return eval(page.getPositionCount(), valueVector).asBlock(); } public LongBlock eval(int positionCount, LongBlock valueBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java index 1a052c8c5b03b..fe6602ecbb099 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java @@ -46,26 +46,28 @@ public DateExtractEvaluator(Source source, EvalOperator.ExpressionEvaluator valu } @Override - public Block eval(Page page) { - Block valueUncastBlock = value.eval(page); - if (valueUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valueBlock = (LongBlock) valueUncastBlock; - Block chronoFieldUncastBlock = chronoField.eval(page); - if (chronoFieldUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock chronoFieldBlock = (BytesRefBlock) chronoFieldUncastBlock; - LongVector valueVector = valueBlock.asVector(); - if (valueVector == null) { - return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); - } - BytesRefVector 
chronoFieldVector = chronoFieldBlock.asVector(); - if (chronoFieldVector == null) { - return eval(page.getPositionCount(), valueBlock, chronoFieldBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valueRef = value.eval(page)) { + if (valueRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valueBlock = (LongBlock) valueRef.block(); + try (Block.Ref chronoFieldRef = chronoField.eval(page)) { + if (chronoFieldRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock chronoFieldBlock = (BytesRefBlock) chronoFieldRef.block(); + LongVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valueBlock, chronoFieldBlock)); + } + BytesRefVector chronoFieldVector = chronoFieldBlock.asVector(); + if (chronoFieldVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valueBlock, chronoFieldBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valueVector, chronoFieldVector)); + } } - return eval(page.getPositionCount(), valueVector, chronoFieldVector); } public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chronoFieldBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java index d724c1d27d81e..25cd28f6d2335 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java @@ -36,17 +36,18 @@ public 
DateFormatConstantEvaluator(EvalOperator.ExpressionEvaluator val, DateFor } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valBlock = (LongBlock) valUncastBlock; - LongVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valBlock = (LongBlock) valRef.block(); + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public BytesRefBlock eval(int positionCount, LongBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java index 1e1b9a9cbd366..cea51c3894555 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java @@ -40,26 +40,28 @@ public DateFormatEvaluator(EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valBlock = (LongBlock) valUncastBlock; - Block formatterUncastBlock 
= formatter.eval(page); - if (formatterUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock formatterBlock = (BytesRefBlock) formatterUncastBlock; - LongVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock, formatterBlock); - } - BytesRefVector formatterVector = formatterBlock.asVector(); - if (formatterVector == null) { - return eval(page.getPositionCount(), valBlock, formatterBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valBlock = (LongBlock) valRef.block(); + try (Block.Ref formatterRef = formatter.eval(page)) { + if (formatterRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock formatterBlock = (BytesRefBlock) formatterRef.block(); + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, formatterBlock)); + } + BytesRefVector formatterVector = formatterBlock.asVector(); + if (formatterVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, formatterBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector, formatterVector).asBlock()); + } } - return eval(page.getPositionCount(), valVector, formatterVector).asBlock(); } public BytesRefBlock eval(int positionCount, LongBlock valBlock, BytesRefBlock formatterBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java index 0a1029f479f6d..6d7552dcb5a13 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java @@ -42,17 +42,18 @@ public DateParseConstantEvaluator(Source source, EvalOperator.ExpressionEvaluato } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock valBlock = (BytesRefBlock) valUncastBlock; - BytesRefVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public LongBlock eval(int positionCount, BytesRefBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java index 00eedcdf78c43..aca55a953fcde 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java @@ -45,26 +45,28 @@ public 
DateParseEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock valBlock = (BytesRefBlock) valUncastBlock; - Block formatterUncastBlock = formatter.eval(page); - if (formatterUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock formatterBlock = (BytesRefBlock) formatterUncastBlock; - BytesRefVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock, formatterBlock); - } - BytesRefVector formatterVector = formatterBlock.asVector(); - if (formatterVector == null) { - return eval(page.getPositionCount(), valBlock, formatterBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); + try (Block.Ref formatterRef = formatter.eval(page)) { + if (formatterRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock formatterBlock = (BytesRefBlock) formatterRef.block(); + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, formatterBlock)); + } + BytesRefVector formatterVector = formatterBlock.asVector(); + if (formatterVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, formatterBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector, formatterVector)); + } } - return eval(page.getPositionCount(), valVector, formatterVector); } public LongBlock eval(int positionCount, BytesRefBlock valBlock, BytesRefBlock 
formatterBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java index 1f6592b368dbc..956a1d07576c1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java @@ -34,17 +34,18 @@ public DateTruncEvaluator(EvalOperator.ExpressionEvaluator fieldVal, Rounding.Pr } @Override - public Block eval(Page page) { - Block fieldValUncastBlock = fieldVal.eval(page); - if (fieldValUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock fieldValBlock = (LongBlock) fieldValUncastBlock; - LongVector fieldValVector = fieldValBlock.asVector(); - if (fieldValVector == null) { - return eval(page.getPositionCount(), fieldValBlock); + public Block.Ref eval(Page page) { + try (Block.Ref fieldValRef = fieldVal.eval(page)) { + if (fieldValRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock fieldValBlock = (LongBlock) fieldValRef.block(); + LongVector fieldValVector = fieldValBlock.asVector(); + if (fieldValVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), fieldValBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), fieldValVector).asBlock()); } - return eval(page.getPositionCount(), fieldValVector).asBlock(); } public LongBlock eval(int positionCount, LongBlock fieldValBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java index b07f07861076c..4fcb15d8f8952 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java @@ -27,8 +27,8 @@ public NowEvaluator(long now, DriverContext driverContext) { } @Override - public Block eval(Page page) { - return eval(page.getPositionCount()).asBlock(); + public Block.Ref eval(Page page) { + return Block.Ref.floating(eval(page.getPositionCount()).asBlock()); } public LongVector eval(int positionCount) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java index 77810a8441471..7362242d8cc69 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -37,32 +38,37 @@ public CIDRMatchEvaluator(EvalOperator.ExpressionEvaluator ip, } @Override - public Block eval(Page page) { - Block ipUncastBlock = ip.eval(page); - if (ipUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock ipBlock = (BytesRefBlock) ipUncastBlock; - BytesRefBlock[] cidrsBlocks = new BytesRefBlock[cidrs.length]; - for (int i = 0; 
i < cidrsBlocks.length; i++) { - Block block = cidrs[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + try (Block.Ref ipRef = ip.eval(page)) { + if (ipRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); } - cidrsBlocks[i] = (BytesRefBlock) block; - } - BytesRefVector ipVector = ipBlock.asVector(); - if (ipVector == null) { - return eval(page.getPositionCount(), ipBlock, cidrsBlocks); - } - BytesRefVector[] cidrsVectors = new BytesRefVector[cidrs.length]; - for (int i = 0; i < cidrsBlocks.length; i++) { - cidrsVectors[i] = cidrsBlocks[i].asVector(); - if (cidrsVectors[i] == null) { - return eval(page.getPositionCount(), ipBlock, cidrsBlocks); + BytesRefBlock ipBlock = (BytesRefBlock) ipRef.block(); + Block.Ref[] cidrsRefs = new Block.Ref[cidrs.length]; + try (Releasable cidrsRelease = Releasables.wrap(cidrsRefs)) { + BytesRefBlock[] cidrsBlocks = new BytesRefBlock[cidrs.length]; + for (int i = 0; i < cidrsBlocks.length; i++) { + cidrsRefs[i] = cidrs[i].eval(page); + Block block = cidrsRefs[i].block(); + if (block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + cidrsBlocks[i] = (BytesRefBlock) block; + } + BytesRefVector ipVector = ipBlock.asVector(); + if (ipVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), ipBlock, cidrsBlocks)); + } + BytesRefVector[] cidrsVectors = new BytesRefVector[cidrs.length]; + for (int i = 0; i < cidrsBlocks.length; i++) { + cidrsVectors[i] = cidrsBlocks[i].asVector(); + if (cidrsVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), ipBlock, cidrsBlocks)); + } + } + return Block.Ref.floating(eval(page.getPositionCount(), ipVector, cidrsVectors).asBlock()); } } - return eval(page.getPositionCount(), ipVector, cidrsVectors).asBlock(); } public BooleanBlock eval(int 
positionCount, BytesRefBlock ipBlock, BytesRefBlock[] cidrsBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java index c6f1d44b42226..0de6d0308c122 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java @@ -30,17 +30,18 @@ public AbsDoubleEvaluator(EvalOperator.ExpressionEvaluator fieldVal, } @Override - public Block eval(Page page) { - Block fieldValUncastBlock = fieldVal.eval(page); - if (fieldValUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock fieldValBlock = (DoubleBlock) fieldValUncastBlock; - DoubleVector fieldValVector = fieldValBlock.asVector(); - if (fieldValVector == null) { - return eval(page.getPositionCount(), fieldValBlock); + public Block.Ref eval(Page page) { + try (Block.Ref fieldValRef = fieldVal.eval(page)) { + if (fieldValRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock fieldValBlock = (DoubleBlock) fieldValRef.block(); + DoubleVector fieldValVector = fieldValBlock.asVector(); + if (fieldValVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), fieldValBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), fieldValVector).asBlock()); } - return eval(page.getPositionCount(), fieldValVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock fieldValBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java index 2ded2d1937765..797c72bee1109 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java @@ -29,17 +29,18 @@ public AbsIntEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext } @Override - public Block eval(Page page) { - Block fieldValUncastBlock = fieldVal.eval(page); - if (fieldValUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock fieldValBlock = (IntBlock) fieldValUncastBlock; - IntVector fieldValVector = fieldValBlock.asVector(); - if (fieldValVector == null) { - return eval(page.getPositionCount(), fieldValBlock); + public Block.Ref eval(Page page) { + try (Block.Ref fieldValRef = fieldVal.eval(page)) { + if (fieldValRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock fieldValBlock = (IntBlock) fieldValRef.block(); + IntVector fieldValVector = fieldValBlock.asVector(); + if (fieldValVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), fieldValBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), fieldValVector).asBlock()); } - return eval(page.getPositionCount(), fieldValVector).asBlock(); } public IntBlock eval(int positionCount, IntBlock fieldValBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java index f2085df977695..19e8db24e0cd4 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java @@ -29,17 +29,18 @@ public AbsLongEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext } @Override - public Block eval(Page page) { - Block fieldValUncastBlock = fieldVal.eval(page); - if (fieldValUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock fieldValBlock = (LongBlock) fieldValUncastBlock; - LongVector fieldValVector = fieldValBlock.asVector(); - if (fieldValVector == null) { - return eval(page.getPositionCount(), fieldValBlock); + public Block.Ref eval(Page page) { + try (Block.Ref fieldValRef = fieldVal.eval(page)) { + if (fieldValRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock fieldValBlock = (LongBlock) fieldValRef.block(); + LongVector fieldValVector = fieldValBlock.asVector(); + if (fieldValVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), fieldValBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), fieldValVector).asBlock()); } - return eval(page.getPositionCount(), fieldValVector).asBlock(); } public LongBlock eval(int positionCount, LongBlock fieldValBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java index 84d6969f67e51..64be85c7cb8a0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java @@ -36,17 +36,18 @@ public AcosEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java index 303a1a21a9c17..015809cc02a20 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java @@ -36,17 +36,18 @@ public AsinEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return 
Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java index ffc903c70e3d5..4b2accb338a5d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java @@ -33,26 +33,28 @@ public Atan2Evaluator(EvalOperator.ExpressionEvaluator y, EvalOperator.Expressio } @Override - public Block eval(Page page) { - Block yUncastBlock = y.eval(page); - if (yUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock yBlock = (DoubleBlock) yUncastBlock; - Block xUncastBlock = x.eval(page); - if (xUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock xBlock = (DoubleBlock) xUncastBlock; - DoubleVector yVector = yBlock.asVector(); - if (yVector == null) 
{ - return eval(page.getPositionCount(), yBlock, xBlock); - } - DoubleVector xVector = xBlock.asVector(); - if (xVector == null) { - return eval(page.getPositionCount(), yBlock, xBlock); + public Block.Ref eval(Page page) { + try (Block.Ref yRef = y.eval(page)) { + if (yRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock yBlock = (DoubleBlock) yRef.block(); + try (Block.Ref xRef = x.eval(page)) { + if (xRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock xBlock = (DoubleBlock) xRef.block(); + DoubleVector yVector = yBlock.asVector(); + if (yVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), yBlock, xBlock)); + } + DoubleVector xVector = xBlock.asVector(); + if (xVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), yBlock, xBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), yVector, xVector).asBlock()); + } } - return eval(page.getPositionCount(), yVector, xVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock yBlock, DoubleBlock xBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java index fe2152f9c1fda..5fa0d1e3330f3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java @@ -29,17 +29,18 @@ public AtanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverC } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - 
return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java index 84e8de91862f1..6e16e6b697fcf 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java @@ -31,17 +31,18 @@ public CastIntToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, DriverContex } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock vBlock = (IntBlock) vUncastBlock; - IntVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if 
(vRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock vBlock = (IntBlock) vRef.block(); + IntVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), vVector).asBlock()); } - return eval(page.getPositionCount(), vVector).asBlock(); } public DoubleBlock eval(int positionCount, IntBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java index e260f157b4a47..e36476b5b1fa0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java @@ -31,17 +31,18 @@ public CastIntToLongEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock vBlock = (IntBlock) vUncastBlock; - IntVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if (vRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock vBlock = (IntBlock) vRef.block(); + IntVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), 
vVector).asBlock()); } - return eval(page.getPositionCount(), vVector).asBlock(); } public LongBlock eval(int positionCount, IntBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java index d908234423bac..313a7fb7fc59e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java @@ -32,17 +32,18 @@ public CastIntToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v, } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock vBlock = (IntBlock) vUncastBlock; - IntVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if (vRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock vBlock = (IntBlock) vRef.block(); + IntVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), vVector).asBlock()); } - return eval(page.getPositionCount(), vVector).asBlock(); } public LongBlock eval(int positionCount, IntBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java index 8406af3f21b8e..746bea80560ab 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java @@ -32,17 +32,18 @@ public CastLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock vBlock = (LongBlock) vUncastBlock; - LongVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if (vRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock vBlock = (LongBlock) vRef.block(); + LongVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), vVector).asBlock()); } - return eval(page.getPositionCount(), vVector).asBlock(); } public DoubleBlock eval(int positionCount, LongBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java index 8d91999065e30..dd245e68085bf 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java @@ -30,17 +30,18 @@ public CastLongToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v, } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock vBlock = (LongBlock) vUncastBlock; - LongVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if (vRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock vBlock = (LongBlock) vRef.block(); + LongVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), vVector).asBlock()); } - return eval(page.getPositionCount(), vVector).asBlock(); } public LongBlock eval(int positionCount, LongBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java index 6dcad54ea4c61..8e7ec68789e18 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java @@ -32,17 +32,18 @@ public CastUnsignedLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - 
if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock vBlock = (LongBlock) vUncastBlock; - LongVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if (vRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock vBlock = (LongBlock) vRef.block(); + LongVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), vVector).asBlock()); } - return eval(page.getPositionCount(), vVector).asBlock(); } public DoubleBlock eval(int positionCount, LongBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java index d27cc25407a88..ffe0fa4c535b1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java @@ -29,17 +29,18 @@ public CeilDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext d } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if 
(valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java index ddc796cee6c43..db0f8ca849735 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java @@ -29,17 +29,18 @@ public CosEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverCo } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return 
Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java index 01f7b1a6ba3d1..f8478be31e13b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java @@ -36,17 +36,18 @@ public CoshEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java index c40444a4f5617..35d683845226c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java @@ -29,17 +29,18 @@ public FloorDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java index 1de4689d1f652..5118b3f437792 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java @@ -31,17 +31,18 @@ public IsFiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext dri } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public BooleanBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java index 637b18ea212a7..3803263e7b249 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java @@ -31,17 +31,18 @@ public IsInfiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext d } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); 
- if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public BooleanBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java index 6174d38f602a4..f8f813982a572 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java @@ -31,17 +31,18 @@ public IsNaNEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driver } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try 
(Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public BooleanBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java index 569e0560406ec..92142620ba499 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java @@ -36,17 +36,18 @@ public Log10DoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return 
Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java index 0d70b89e0e673..7aac2ea753156 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java @@ -37,17 +37,18 @@ public Log10IntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock valBlock = (IntBlock) valUncastBlock; - IntVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock valBlock = (IntBlock) valRef.block(); + IntVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, IntBlock valBlock) { diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java index 52123291d8c01..359a3df1c552e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java @@ -37,17 +37,18 @@ public Log10LongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valBlock = (LongBlock) valUncastBlock; - LongVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valBlock = (LongBlock) valRef.block(); + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, LongBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java index 4de00fe591a5f..ce6ec1e29b062 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java @@ -37,17 +37,18 @@ public Log10UnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluato } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valBlock = (LongBlock) valUncastBlock; - LongVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valBlock = (LongBlock) valRef.block(); + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, LongBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java index d129596790441..c0c60d497dc92 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java @@ -39,26 +39,28 @@ public PowDoubleEvaluator(Source source, 
EvalOperator.ExpressionEvaluator base, } @Override - public Block eval(Page page) { - Block baseUncastBlock = base.eval(page); - if (baseUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock baseBlock = (DoubleBlock) baseUncastBlock; - Block exponentUncastBlock = exponent.eval(page); - if (exponentUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock exponentBlock = (DoubleBlock) exponentUncastBlock; - DoubleVector baseVector = baseBlock.asVector(); - if (baseVector == null) { - return eval(page.getPositionCount(), baseBlock, exponentBlock); - } - DoubleVector exponentVector = exponentBlock.asVector(); - if (exponentVector == null) { - return eval(page.getPositionCount(), baseBlock, exponentBlock); + public Block.Ref eval(Page page) { + try (Block.Ref baseRef = base.eval(page)) { + if (baseRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); + try (Block.Ref exponentRef = exponent.eval(page)) { + if (exponentRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); + DoubleVector baseVector = baseBlock.asVector(); + if (baseVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + DoubleVector exponentVector = exponentBlock.asVector(); + if (exponentVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); + } } - return eval(page.getPositionCount(), baseVector, exponentVector); } public DoubleBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java index ae8f528c4075f..9ddd2077f063e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java @@ -40,26 +40,28 @@ public PowIntEvaluator(Source source, EvalOperator.ExpressionEvaluator base, } @Override - public Block eval(Page page) { - Block baseUncastBlock = base.eval(page); - if (baseUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock baseBlock = (DoubleBlock) baseUncastBlock; - Block exponentUncastBlock = exponent.eval(page); - if (exponentUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock exponentBlock = (DoubleBlock) exponentUncastBlock; - DoubleVector baseVector = baseBlock.asVector(); - if (baseVector == null) { - return eval(page.getPositionCount(), baseBlock, exponentBlock); - } - DoubleVector exponentVector = exponentBlock.asVector(); - if (exponentVector == null) { - return eval(page.getPositionCount(), baseBlock, exponentBlock); + public Block.Ref eval(Page page) { + try (Block.Ref baseRef = base.eval(page)) { + if (baseRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); + try (Block.Ref exponentRef = exponent.eval(page)) { + if (exponentRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); + DoubleVector baseVector = 
baseBlock.asVector(); + if (baseVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + DoubleVector exponentVector = exponentBlock.asVector(); + if (exponentVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); + } } - return eval(page.getPositionCount(), baseVector, exponentVector); } public IntBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java index e9509f24a74b4..da37bef1fb3eb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java @@ -40,26 +40,28 @@ public PowLongEvaluator(Source source, EvalOperator.ExpressionEvaluator base, } @Override - public Block eval(Page page) { - Block baseUncastBlock = base.eval(page); - if (baseUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock baseBlock = (DoubleBlock) baseUncastBlock; - Block exponentUncastBlock = exponent.eval(page); - if (exponentUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock exponentBlock = (DoubleBlock) exponentUncastBlock; - DoubleVector baseVector = baseBlock.asVector(); - if (baseVector == null) { - return eval(page.getPositionCount(), baseBlock, exponentBlock); - } - DoubleVector exponentVector = exponentBlock.asVector(); - if (exponentVector == null) { - return 
eval(page.getPositionCount(), baseBlock, exponentBlock); + public Block.Ref eval(Page page) { + try (Block.Ref baseRef = base.eval(page)) { + if (baseRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); + try (Block.Ref exponentRef = exponent.eval(page)) { + if (exponentRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); + DoubleVector baseVector = baseBlock.asVector(); + if (baseVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + DoubleVector exponentVector = exponentBlock.asVector(); + if (exponentVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); + } } - return eval(page.getPositionCount(), baseVector, exponentVector); } public LongBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java index 55119794db66f..83fe4d7e18a62 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java @@ -35,26 +35,28 @@ public RoundDoubleEvaluator(EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return 
Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - Block decimalsUncastBlock = decimals.eval(page); - if (decimalsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock decimalsBlock = (LongBlock) decimalsUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock, decimalsBlock); - } - LongVector decimalsVector = decimalsBlock.asVector(); - if (decimalsVector == null) { - return eval(page.getPositionCount(), valBlock, decimalsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + try (Block.Ref decimalsRef = decimals.eval(page)) { + if (decimalsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock decimalsBlock = (LongBlock) decimalsRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, decimalsBlock)); + } + LongVector decimalsVector = decimalsBlock.asVector(); + if (decimalsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, decimalsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector, decimalsVector).asBlock()); + } } - return eval(page.getPositionCount(), valVector, decimalsVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock, LongBlock decimalsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java index 9c928d5a785d4..b14fd17fe5891 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java @@ -30,17 +30,18 @@ public RoundDoubleNoDecimalsEvaluator(EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java index 6ae57c874a8d4..dc9801e855654 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java @@ -35,26 +35,28 @@ public RoundIntEvaluator(EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock valBlock = (IntBlock) valUncastBlock; - Block decimalsUncastBlock = decimals.eval(page); - if (decimalsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock decimalsBlock = (LongBlock) decimalsUncastBlock; - IntVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock, decimalsBlock); - } - LongVector decimalsVector = decimalsBlock.asVector(); - if (decimalsVector == null) { - return eval(page.getPositionCount(), valBlock, decimalsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock valBlock = (IntBlock) valRef.block(); + try (Block.Ref decimalsRef = decimals.eval(page)) { + if (decimalsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock decimalsBlock = (LongBlock) decimalsRef.block(); + IntVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, decimalsBlock)); + } + LongVector decimalsVector = decimalsBlock.asVector(); + if (decimalsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, decimalsBlock)); + } + return 
Block.Ref.floating(eval(page.getPositionCount(), valVector, decimalsVector).asBlock()); + } } - return eval(page.getPositionCount(), valVector, decimalsVector).asBlock(); } public IntBlock eval(int positionCount, IntBlock valBlock, LongBlock decimalsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java index 221473cbe5931..16ae753d2e910 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java @@ -33,26 +33,28 @@ public RoundLongEvaluator(EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valBlock = (LongBlock) valUncastBlock; - Block decimalsUncastBlock = decimals.eval(page); - if (decimalsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock decimalsBlock = (LongBlock) decimalsUncastBlock; - LongVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock, decimalsBlock); - } - LongVector decimalsVector = decimalsBlock.asVector(); - if (decimalsVector == null) { - return eval(page.getPositionCount(), valBlock, decimalsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valBlock = (LongBlock) valRef.block(); + try (Block.Ref decimalsRef = decimals.eval(page)) { + 
if (decimalsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock decimalsBlock = (LongBlock) decimalsRef.block(); + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, decimalsBlock)); + } + LongVector decimalsVector = decimalsBlock.asVector(); + if (decimalsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, decimalsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector, decimalsVector).asBlock()); + } } - return eval(page.getPositionCount(), valVector, decimalsVector).asBlock(); } public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java index 5a1051a48515b..d1d272fffa93c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java @@ -33,26 +33,28 @@ public RoundUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valBlock = (LongBlock) valUncastBlock; - Block decimalsUncastBlock = decimals.eval(page); - if (decimalsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock decimalsBlock = (LongBlock) decimalsUncastBlock; - LongVector valVector = 
valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock, decimalsBlock); - } - LongVector decimalsVector = decimalsBlock.asVector(); - if (decimalsVector == null) { - return eval(page.getPositionCount(), valBlock, decimalsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valBlock = (LongBlock) valRef.block(); + try (Block.Ref decimalsRef = decimals.eval(page)) { + if (decimalsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock decimalsBlock = (LongBlock) decimalsRef.block(); + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, decimalsBlock)); + } + LongVector decimalsVector = decimalsBlock.asVector(); + if (decimalsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock, decimalsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector, decimalsVector).asBlock()); + } } - return eval(page.getPositionCount(), valVector, decimalsVector).asBlock(); } public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java index ef03377a9e3de..166f8810e1039 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java @@ -29,17 +29,18 @@ public 
SinEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverCo } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java index 7450438dc064a..1340255015763 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java @@ -36,17 +36,18 @@ public SinhEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = 
valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java index 6743e9548e189..31c8ebc6914fc 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java @@ -36,17 +36,18 @@ public SqrtDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = 
(DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java index 0ad276c45397d..71c136ec5aa2e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java @@ -37,17 +37,18 @@ public SqrtIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock valBlock = (IntBlock) valUncastBlock; - IntVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock valBlock = (IntBlock) valRef.block(); + IntVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, IntBlock valBlock) { 
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java index dc2c4c9598cda..c34b0cda776ad 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java @@ -37,17 +37,18 @@ public SqrtLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valBlock = (LongBlock) valUncastBlock; - LongVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valBlock = (LongBlock) valRef.block(); + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector)); } - return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, LongBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java index 736cf62284a7c..a5f69feb9277d 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java @@ -32,17 +32,18 @@ public SqrtUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock valBlock = (LongBlock) valUncastBlock; - LongVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock valBlock = (LongBlock) valRef.block(); + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public DoubleBlock eval(int positionCount, LongBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java index 2477d395fafb8..d3d0c93a25407 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java @@ -29,17 +29,18 @@ public TanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext 
driverCo } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java index 63b30fec6009c..6e6437b758675 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java @@ -29,17 +29,18 @@ public TanhEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverC } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock valBlock = (DoubleBlock) valUncastBlock; - DoubleVector valVector = valBlock.asVector(); - if (valVector == null) { - return 
eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock valBlock = (DoubleBlock) valRef.block(); + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java index 6eb2c1082d2de..11622d7dbf149 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -35,49 +34,53 @@ public String name() { * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Block evalNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + public Block.Ref evalNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + double value = v.getDouble(i); + MvAvg.process(work, value); + } + double result = MvAvg.finish(work, valueCount); + builder.appendDouble(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - double value = v.getDouble(i); - MvAvg.process(work, value); - } - double result = MvAvg.finish(work, valueCount); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - double value = v.getDouble(i); - MvAvg.process(work, value); + public Block.Ref evalNotNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + double value = v.getDouble(i); + MvAvg.process(work, value); + } + double result = MvAvg.finish(work, valueCount); + builder.appendDouble(result); } - double result = MvAvg.finish(work, valueCount); - builder.appendDouble(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java index 27c1d50cdf400..ca5e0874cc9c0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java @@ -10,7 +10,6 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -36,105 +35,113 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; - } - int first = v.getFirstValueIndex(p); - if (valueCount == 1) { - int value = v.getInt(first); - double result = MvAvg.single(value); + public Block.Ref evalNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + if (valueCount == 1) { + int value = v.getInt(first); + double result = MvAvg.single(value); + builder.appendDouble(result); + continue; + } + int end = first + valueCount; + for (int i = first; i < end; i++) { + int value = v.getInt(i); + MvAvg.process(work, value); + } + double result = 
MvAvg.finish(work, valueCount); builder.appendDouble(result); - continue; - } - int end = first + valueCount; - for (int i = first; i < end; i++) { - int value = v.getInt(i); - MvAvg.process(work, value); } - double result = MvAvg.finish(work, valueCount); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. */ @Override - public Vector evalNotNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - if (valueCount == 1) { - int value = v.getInt(first); - double result = MvAvg.single(value); + public Block.Ref evalNotNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + if (valueCount == 1) { + int value = v.getInt(first); + double result = MvAvg.single(value); + builder.appendDouble(result); + continue; + } + int end = first + valueCount; + for (int i = first; i < end; i++) { + int value = v.getInt(i); + MvAvg.process(work, value); + } + double result = MvAvg.finish(work, valueCount); builder.appendDouble(result); - continue; - } - int end = first + valueCount; - for (int i = first; i < end; i++) { - int value = v.getInt(i); - MvAvg.process(work, value); } - double result = MvAvg.finish(work, valueCount); - builder.appendDouble(result); + return 
Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing only single valued fields. */ @Override - public Block evalSingleValuedNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + public Block.Ref evalSingleValuedNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + assert valueCount == 1; + int first = v.getFirstValueIndex(p); + int value = v.getInt(first); + double result = MvAvg.single(value); + builder.appendDouble(result); } - assert valueCount == 1; - int first = v.getFirstValueIndex(p); - int value = v.getInt(first); - double result = MvAvg.single(value); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing only single valued fields. 
*/ @Override - public Vector evalSingleValuedNotNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - assert valueCount == 1; - int first = v.getFirstValueIndex(p); - int value = v.getInt(first); - double result = MvAvg.single(value); - builder.appendDouble(result); + public Block.Ref evalSingleValuedNotNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + assert valueCount == 1; + int first = v.getFirstValueIndex(p); + int value = v.getInt(first); + double result = MvAvg.single(value); + builder.appendDouble(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java index 652d7f73a0872..9bbf92171d6cf 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java @@ -10,7 +10,6 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import 
org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -36,105 +35,113 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; - } - int first = v.getFirstValueIndex(p); - if (valueCount == 1) { - long value = v.getLong(first); - double result = MvAvg.single(value); + public Block.Ref evalNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + if (valueCount == 1) { + long value = v.getLong(first); + double result = MvAvg.single(value); + builder.appendDouble(result); + continue; + } + int end = first + valueCount; + for (int i = first; i < end; i++) { + long value = v.getLong(i); + MvAvg.process(work, value); + } + double result = MvAvg.finish(work, valueCount); builder.appendDouble(result); - continue; - } - int end = first + valueCount; - for (int i = first; i < end; i++) { - long value = v.getLong(i); - MvAvg.process(work, value); } - double result = MvAvg.finish(work, valueCount); - 
builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. */ @Override - public Vector evalNotNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - if (valueCount == 1) { - long value = v.getLong(first); - double result = MvAvg.single(value); + public Block.Ref evalNotNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + if (valueCount == 1) { + long value = v.getLong(first); + double result = MvAvg.single(value); + builder.appendDouble(result); + continue; + } + int end = first + valueCount; + for (int i = first; i < end; i++) { + long value = v.getLong(i); + MvAvg.process(work, value); + } + double result = MvAvg.finish(work, valueCount); builder.appendDouble(result); - continue; - } - int end = first + valueCount; - for (int i = first; i < end; i++) { - long value = v.getLong(i); - MvAvg.process(work, value); } - double result = MvAvg.finish(work, valueCount); - builder.appendDouble(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing only single valued fields. 
*/ @Override - public Block evalSingleValuedNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + public Block.Ref evalSingleValuedNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + assert valueCount == 1; + int first = v.getFirstValueIndex(p); + long value = v.getLong(first); + double result = MvAvg.single(value); + builder.appendDouble(result); } - assert valueCount == 1; - int first = v.getFirstValueIndex(p); - long value = v.getLong(first); - double result = MvAvg.single(value); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing only single valued fields. 
*/ @Override - public Vector evalSingleValuedNotNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - assert valueCount == 1; - int first = v.getFirstValueIndex(p); - long value = v.getLong(first); - double result = MvAvg.single(value); - builder.appendDouble(result); + public Block.Ref evalSingleValuedNotNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + assert valueCount == 1; + int first = v.getFirstValueIndex(p); + long value = v.getLong(first); + double result = MvAvg.single(value); + builder.appendDouble(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java index fd9f25124898b..1c448ebfd2855 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java @@ -10,7 +10,6 @@ import org.elasticsearch.compute.data.DoubleBlock; import 
org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -37,105 +36,113 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; - } - int first = v.getFirstValueIndex(p); - if (valueCount == 1) { - long value = v.getLong(first); - double result = MvAvg.singleUnsignedLong(value); + public Block.Ref evalNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + if (valueCount == 1) { + long value = v.getLong(first); + double result = MvAvg.singleUnsignedLong(value); + builder.appendDouble(result); + continue; + } + int end = first + valueCount; + for (int i = first; i < end; i++) { + long value = v.getLong(i); + MvAvg.processUnsignedLong(work, value); + } + double result = MvAvg.finish(work, valueCount); builder.appendDouble(result); - continue; - } - int end = first + valueCount; - for (int i = first; i < end; i++) { - long value = 
v.getLong(i); - MvAvg.processUnsignedLong(work, value); } - double result = MvAvg.finish(work, valueCount); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. */ @Override - public Vector evalNotNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - if (valueCount == 1) { - long value = v.getLong(first); - double result = MvAvg.singleUnsignedLong(value); + public Block.Ref evalNotNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + if (valueCount == 1) { + long value = v.getLong(first); + double result = MvAvg.singleUnsignedLong(value); + builder.appendDouble(result); + continue; + } + int end = first + valueCount; + for (int i = first; i < end; i++) { + long value = v.getLong(i); + MvAvg.processUnsignedLong(work, value); + } + double result = MvAvg.finish(work, valueCount); builder.appendDouble(result); - continue; - } - int end = first + valueCount; - for (int i = first; i < end; i++) { - long value = v.getLong(i); - MvAvg.processUnsignedLong(work, value); } - double result = MvAvg.finish(work, valueCount); - builder.appendDouble(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks 
containing only single valued fields. */ @Override - public Block evalSingleValuedNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + public Block.Ref evalSingleValuedNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + assert valueCount == 1; + int first = v.getFirstValueIndex(p); + long value = v.getLong(first); + double result = MvAvg.singleUnsignedLong(value); + builder.appendDouble(result); } - assert valueCount == 1; - int first = v.getFirstValueIndex(p); - long value = v.getLong(first); - double result = MvAvg.singleUnsignedLong(value); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing only single valued fields. 
*/ @Override - public Vector evalSingleValuedNotNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - assert valueCount == 1; - int first = v.getFirstValueIndex(p); - long value = v.getLong(first); - double result = MvAvg.singleUnsignedLong(value); - builder.appendDouble(result); + public Block.Ref evalSingleValuedNotNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + assert valueCount == 1; + int first = v.getFirstValueIndex(p); + long value = v.getLong(first); + double result = MvAvg.singleUnsignedLong(value); + builder.appendDouble(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java index 3b15fe9f17293..735de1c43ca04 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; 
import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -35,93 +34,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - BooleanBlock v = (BooleanBlock) fieldVal; - int positionCount = v.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + BooleanBlock v = (BooleanBlock) ref.block(); + int positionCount = v.getPositionCount(); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + boolean value = v.getBoolean(first); + for (int i = first + 1; i < end; i++) { + boolean next = v.getBoolean(i); + value = MvMax.process(value, next); + } + boolean result = value; + builder.appendBoolean(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - boolean value = v.getBoolean(first); - for (int i = first + 1; i < end; i++) { - boolean next = v.getBoolean(i); - value = MvMax.process(value, next); - } - boolean result = value; - builder.appendBoolean(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at 
least one multivalued field. */ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - BooleanBlock v = (BooleanBlock) fieldVal; - int positionCount = v.getPositionCount(); - BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - boolean value = v.getBoolean(first); - for (int i = first + 1; i < end; i++) { - boolean next = v.getBoolean(i); - value = MvMax.process(value, next); + try (ref) { + BooleanBlock v = (BooleanBlock) ref.block(); + int positionCount = v.getPositionCount(); + BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + boolean value = v.getBoolean(first); + for (int i = first + 1; i < end; i++) { + boolean next = v.getBoolean(i); + value = MvMax.process(value, next); + } + boolean result = value; + builder.appendBoolean(result); } - boolean result = value; - builder.appendBoolean(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - BooleanBlock v = (BooleanBlock) fieldVal; - int positionCount = v.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + BooleanBlock v = (BooleanBlock) ref.block(); + int positionCount = v.getPositionCount(); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + boolean result = v.getBoolean(first + idx); + builder.appendBoolean(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - boolean result = v.getBoolean(first + idx); - builder.appendBoolean(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - BooleanBlock v = (BooleanBlock) fieldVal; - int positionCount = v.getPositionCount(); - BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - boolean result = v.getBoolean(first + idx); - builder.appendBoolean(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + BooleanBlock v = (BooleanBlock) ref.block(); + int positionCount = v.getPositionCount(); + BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + boolean result = v.getBoolean(first + idx); + builder.appendBoolean(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java index 6401664c9aa0d..cd2f28a1e2665 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java @@ -10,7 +10,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.Vector; import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -36,101 +35,109 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - BytesRefBlock v = (BytesRefBlock) fieldVal; - int positionCount = v.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - BytesRef firstScratch = new BytesRef(); - BytesRef nextScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + BytesRefBlock v = (BytesRefBlock) ref.block(); + int positionCount = v.getPositionCount(); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + BytesRef firstScratch = new BytesRef(); + BytesRef nextScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + BytesRef value = v.getBytesRef(first, firstScratch); + for (int i = first + 1; i < end; i++) { + BytesRef next = v.getBytesRef(i, nextScratch); + MvMax.process(value, next); + } + BytesRef result = value; + builder.appendBytesRef(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - BytesRef value = v.getBytesRef(first, firstScratch); - for (int i = first + 1; i < end; i++) { - BytesRef next = v.getBytesRef(i, nextScratch); - MvMax.process(value, next); - } - BytesRef result = value; - 
builder.appendBytesRef(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. */ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - BytesRefBlock v = (BytesRefBlock) fieldVal; - int positionCount = v.getPositionCount(); - BytesRefVector.Builder builder = BytesRefVector.newVectorBuilder(positionCount, driverContext.blockFactory()); - BytesRef firstScratch = new BytesRef(); - BytesRef nextScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - BytesRef value = v.getBytesRef(first, firstScratch); - for (int i = first + 1; i < end; i++) { - BytesRef next = v.getBytesRef(i, nextScratch); - MvMax.process(value, next); + try (ref) { + BytesRefBlock v = (BytesRefBlock) ref.block(); + int positionCount = v.getPositionCount(); + BytesRefVector.Builder builder = BytesRefVector.newVectorBuilder(positionCount, driverContext.blockFactory()); + BytesRef firstScratch = new BytesRef(); + BytesRef nextScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + BytesRef value = v.getBytesRef(first, firstScratch); + for (int i = first + 1; i < end; i++) { + BytesRef next = v.getBytesRef(i, nextScratch); + MvMax.process(value, next); + } + BytesRef result = value; + builder.appendBytesRef(result); } - BytesRef result = value; - builder.appendBytesRef(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued 
field and all multivalued fields are in ascending order. */ - private Block evalAscendingNullable(Block fieldVal) { - BytesRefBlock v = (BytesRefBlock) fieldVal; - int positionCount = v.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - BytesRef firstScratch = new BytesRef(); - BytesRef nextScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + BytesRefBlock v = (BytesRefBlock) ref.block(); + int positionCount = v.getPositionCount(); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + BytesRef firstScratch = new BytesRef(); + BytesRef nextScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + BytesRef result = v.getBytesRef(first + idx, firstScratch); + builder.appendBytesRef(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - BytesRef result = v.getBytesRef(first + idx, firstScratch); - builder.appendBytesRef(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - BytesRefBlock v = (BytesRefBlock) fieldVal; - int positionCount = v.getPositionCount(); - BytesRefVector.Builder builder = BytesRefVector.newVectorBuilder(positionCount, driverContext.blockFactory()); - BytesRef firstScratch = new BytesRef(); - BytesRef nextScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - BytesRef result = v.getBytesRef(first + idx, firstScratch); - builder.appendBytesRef(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + BytesRefBlock v = (BytesRefBlock) ref.block(); + int positionCount = v.getPositionCount(); + BytesRefVector.Builder builder = BytesRefVector.newVectorBuilder(positionCount, driverContext.blockFactory()); + BytesRef firstScratch = new BytesRef(); + BytesRef nextScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + BytesRef result = v.getBytesRef(first + idx, firstScratch); + builder.appendBytesRef(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java index 0ec72b82e2438..92dc8970e43ce 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java @@ -9,7 +9,6 @@ import 
org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -34,93 +33,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + double value = v.getDouble(first); + for (int i = first + 1; i < end; i++) { + double next = v.getDouble(i); + value = MvMax.process(value, next); + } + double result = value; + builder.appendDouble(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - double value = v.getDouble(first); - for (int i = first + 1; i < end; i++) { - double next = v.getDouble(i); - value = MvMax.process(value, next); - } - double result = value; - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); 
} - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. */ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - double value = v.getDouble(first); - for (int i = first + 1; i < end; i++) { - double next = v.getDouble(i); - value = MvMax.process(value, next); + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + double value = v.getDouble(first); + for (int i = first + 1; i < end; i++) { + double next = v.getDouble(i); + value = MvMax.process(value, next); + } + double result = value; + builder.appendDouble(result); } - double result = value; - builder.appendDouble(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + double result = v.getDouble(first + idx); + builder.appendDouble(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - double result = v.getDouble(first + idx); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - double result = v.getDouble(first + idx); - builder.appendDouble(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + double result = v.getDouble(first + idx); + builder.appendDouble(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java index 2bf14b26c6c5e..faf4c82dd0076 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; 
import org.elasticsearch.compute.operator.EvalOperator; @@ -34,93 +33,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + int value = v.getInt(first); + for (int i = first + 1; i < end; i++) { + int next = v.getInt(i); + value = MvMax.process(value, next); + } + int result = value; + builder.appendInt(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - int value = v.getInt(first); - for (int i = first + 1; i < end; i++) { - int next = v.getInt(i); - value = MvMax.process(value, next); - } - int result = value; - builder.appendInt(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - int value = v.getInt(first); - for (int i = first + 1; i < end; i++) { - int next = v.getInt(i); - value = MvMax.process(value, next); + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + int value = v.getInt(first); + for (int i = first + 1; i < end; i++) { + int next = v.getInt(i); + value = MvMax.process(value, next); + } + int result = value; + builder.appendInt(result); } - int result = value; - builder.appendInt(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + int result = v.getInt(first + idx); + builder.appendInt(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - int result = v.getInt(first + idx); - builder.appendInt(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - int result = v.getInt(first + idx); - builder.appendInt(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + int result = v.getInt(first + idx); + builder.appendInt(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java index ce5a95bee7699..45c8618280c8b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; @@ -34,93 +33,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long value = v.getLong(first); + for (int i = first + 1; i < end; i++) { + long next = v.getLong(i); + value = MvMax.process(value, next); + } + long result = value; + builder.appendLong(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - long value = v.getLong(first); - for (int i = first + 1; i < end; i++) { - long next = v.getLong(i); - value = MvMax.process(value, next); - } - long result = value; - builder.appendLong(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - long value = v.getLong(first); - for (int i = first + 1; i < end; i++) { - long next = v.getLong(i); - value = MvMax.process(value, next); + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long value = v.getLong(first); + for (int i = first + 1; i < end; i++) { + long next = v.getLong(i); + value = MvMax.process(value, next); + } + long result = value; + builder.appendLong(result); } - long result = value; - builder.appendLong(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + long result = v.getLong(first + idx); + builder.appendLong(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - long result = v.getLong(first + idx); - builder.appendLong(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMax.ascendingIndex(valueCount); - long result = v.getLong(first + idx); - builder.appendLong(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMax.ascendingIndex(valueCount); + long result = v.getLong(first + idx); + builder.appendLong(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java index 15d47db18751f..c4ad0e3c6ad12 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; 
import org.elasticsearch.compute.operator.EvalOperator; @@ -35,49 +34,53 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Doubles work = new MvMedian.Doubles(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + public Block.Ref evalNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Doubles work = new MvMedian.Doubles(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + double value = v.getDouble(i); + MvMedian.process(work, value); + } + double result = MvMedian.finish(work); + builder.appendDouble(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - double value = v.getDouble(i); - MvMedian.process(work, value); - } - double result = MvMedian.finish(work); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Doubles work = new MvMedian.Doubles(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - double value = v.getDouble(i); - MvMedian.process(work, value); + public Block.Ref evalNotNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Doubles work = new MvMedian.Doubles(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + double value = v.getDouble(i); + MvMedian.process(work, value); + } + double result = MvMedian.finish(work); + builder.appendDouble(result); } - double result = MvMedian.finish(work); - builder.appendDouble(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java index 711992a20763e..6fb6386b1d6e1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -34,93 +33,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Ints work = new MvMedian.Ints(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Ints work = new MvMedian.Ints(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + int value = v.getInt(i); + MvMedian.process(work, value); + } + int result = MvMedian.finish(work); + builder.appendInt(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - int value = v.getInt(i); - 
MvMedian.process(work, value); - } - int result = MvMedian.finish(work); - builder.appendInt(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. */ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Ints work = new MvMedian.Ints(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - int value = v.getInt(i); - MvMedian.process(work, value); + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Ints work = new MvMedian.Ints(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + int value = v.getInt(i); + MvMedian.process(work, value); + } + int result = MvMedian.finish(work); + builder.appendInt(result); } - int result = MvMedian.finish(work); - builder.appendInt(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Ints work = new MvMedian.Ints(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Ints work = new MvMedian.Ints(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int result = MvMedian.ascending(v, first, valueCount); + builder.appendInt(result); } - int first = v.getFirstValueIndex(p); - int result = MvMedian.ascending(v, first, valueCount); - builder.appendInt(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Ints work = new MvMedian.Ints(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int result = MvMedian.ascending(v, first, valueCount); - builder.appendInt(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Ints work = new MvMedian.Ints(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int result = MvMedian.ascending(v, first, valueCount); + builder.appendInt(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java index 67d3c123a6953..cd50980f6331a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Vector; import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -35,93 +34,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Longs work = new MvMedian.Longs(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Longs work = new MvMedian.Longs(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + long value = v.getLong(i); + MvMedian.process(work, value); + } + long result = MvMedian.finish(work); + builder.appendLong(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - long value = v.getLong(i); - MvMedian.process(work, value); - } - long result = MvMedian.finish(work); - builder.appendLong(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Longs work = new MvMedian.Longs(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - long value = v.getLong(i); - MvMedian.process(work, value); + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Longs work = new MvMedian.Longs(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + long value = v.getLong(i); + MvMedian.process(work, value); + } + long result = MvMedian.finish(work); + builder.appendLong(result); } - long result = MvMedian.finish(work); - builder.appendLong(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Longs work = new MvMedian.Longs(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Longs work = new MvMedian.Longs(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + long result = MvMedian.ascending(v, first, valueCount); + builder.appendLong(result); } - int first = v.getFirstValueIndex(p); - long result = MvMedian.ascending(v, first, valueCount); - builder.appendLong(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Longs work = new MvMedian.Longs(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - long result = MvMedian.ascending(v, first, valueCount); - builder.appendLong(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Longs work = new MvMedian.Longs(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + long result = MvMedian.ascending(v, first, valueCount); + builder.appendLong(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java index 93538708039b5..1eff172b532f0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Vector; 
import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -35,93 +34,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Longs work = new MvMedian.Longs(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Longs work = new MvMedian.Longs(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + long value = v.getLong(i); + MvMedian.processUnsignedLong(work, value); + } + long result = MvMedian.finishUnsignedLong(work); + builder.appendLong(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - long value = v.getLong(i); - MvMedian.processUnsignedLong(work, value); - } - long result = MvMedian.finishUnsignedLong(work); - builder.appendLong(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Longs work = new MvMedian.Longs(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - long value = v.getLong(i); - MvMedian.processUnsignedLong(work, value); + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Longs work = new MvMedian.Longs(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + long value = v.getLong(i); + MvMedian.processUnsignedLong(work, value); + } + long result = MvMedian.finishUnsignedLong(work); + builder.appendLong(result); } - long result = MvMedian.finishUnsignedLong(work); - builder.appendLong(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Longs work = new MvMedian.Longs(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Longs work = new MvMedian.Longs(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + long result = MvMedian.ascendingUnsignedLong(v, first, valueCount); + builder.appendLong(result); } - int first = v.getFirstValueIndex(p); - long result = MvMedian.ascendingUnsignedLong(v, first, valueCount); - builder.appendLong(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - MvMedian.Longs work = new MvMedian.Longs(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - long result = MvMedian.ascendingUnsignedLong(v, first, valueCount); - builder.appendLong(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + MvMedian.Longs work = new MvMedian.Longs(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + long result = MvMedian.ascendingUnsignedLong(v, first, valueCount); + builder.appendLong(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java index 6e16c8db4b896..44de42c08aa49 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.Vector; 
import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -35,93 +34,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - BooleanBlock v = (BooleanBlock) fieldVal; - int positionCount = v.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + BooleanBlock v = (BooleanBlock) ref.block(); + int positionCount = v.getPositionCount(); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + boolean value = v.getBoolean(first); + for (int i = first + 1; i < end; i++) { + boolean next = v.getBoolean(i); + value = MvMin.process(value, next); + } + boolean result = value; + builder.appendBoolean(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - boolean value = v.getBoolean(first); - for (int i = first + 1; i < end; i++) { - boolean next = v.getBoolean(i); - value = MvMin.process(value, next); - } - boolean result = value; - builder.appendBoolean(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - BooleanBlock v = (BooleanBlock) fieldVal; - int positionCount = v.getPositionCount(); - BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - boolean value = v.getBoolean(first); - for (int i = first + 1; i < end; i++) { - boolean next = v.getBoolean(i); - value = MvMin.process(value, next); + try (ref) { + BooleanBlock v = (BooleanBlock) ref.block(); + int positionCount = v.getPositionCount(); + BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + boolean value = v.getBoolean(first); + for (int i = first + 1; i < end; i++) { + boolean next = v.getBoolean(i); + value = MvMin.process(value, next); + } + boolean result = value; + builder.appendBoolean(result); } - boolean result = value; - builder.appendBoolean(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - BooleanBlock v = (BooleanBlock) fieldVal; - int positionCount = v.getPositionCount(); - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + BooleanBlock v = (BooleanBlock) ref.block(); + int positionCount = v.getPositionCount(); + BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + boolean result = v.getBoolean(first + idx); + builder.appendBoolean(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - boolean result = v.getBoolean(first + idx); - builder.appendBoolean(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - BooleanBlock v = (BooleanBlock) fieldVal; - int positionCount = v.getPositionCount(); - BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - boolean result = v.getBoolean(first + idx); - builder.appendBoolean(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + BooleanBlock v = (BooleanBlock) ref.block(); + int positionCount = v.getPositionCount(); + BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + boolean result = v.getBoolean(first + idx); + builder.appendBoolean(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java index 99a671cf0a2df..855fdf7c6f6a3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java @@ -10,7 +10,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.Vector; import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -36,101 +35,109 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - BytesRefBlock v = (BytesRefBlock) fieldVal; - int positionCount = v.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - BytesRef firstScratch = new BytesRef(); - BytesRef nextScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + BytesRefBlock v = (BytesRefBlock) ref.block(); + int positionCount = v.getPositionCount(); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + BytesRef firstScratch = new BytesRef(); + BytesRef nextScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + BytesRef value = v.getBytesRef(first, firstScratch); + for (int i = first + 1; i < end; i++) { + BytesRef next = v.getBytesRef(i, nextScratch); + MvMin.process(value, next); + } + BytesRef result = value; + builder.appendBytesRef(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - BytesRef value = v.getBytesRef(first, firstScratch); - for (int i = first + 1; i < end; i++) { - BytesRef next = v.getBytesRef(i, nextScratch); - MvMin.process(value, next); - } - BytesRef result = value; - 
builder.appendBytesRef(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. */ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - BytesRefBlock v = (BytesRefBlock) fieldVal; - int positionCount = v.getPositionCount(); - BytesRefVector.Builder builder = BytesRefVector.newVectorBuilder(positionCount, driverContext.blockFactory()); - BytesRef firstScratch = new BytesRef(); - BytesRef nextScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - BytesRef value = v.getBytesRef(first, firstScratch); - for (int i = first + 1; i < end; i++) { - BytesRef next = v.getBytesRef(i, nextScratch); - MvMin.process(value, next); + try (ref) { + BytesRefBlock v = (BytesRefBlock) ref.block(); + int positionCount = v.getPositionCount(); + BytesRefVector.Builder builder = BytesRefVector.newVectorBuilder(positionCount, driverContext.blockFactory()); + BytesRef firstScratch = new BytesRef(); + BytesRef nextScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + BytesRef value = v.getBytesRef(first, firstScratch); + for (int i = first + 1; i < end; i++) { + BytesRef next = v.getBytesRef(i, nextScratch); + MvMin.process(value, next); + } + BytesRef result = value; + builder.appendBytesRef(result); } - BytesRef result = value; - builder.appendBytesRef(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued 
field and all multivalued fields are in ascending order. */ - private Block evalAscendingNullable(Block fieldVal) { - BytesRefBlock v = (BytesRefBlock) fieldVal; - int positionCount = v.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - BytesRef firstScratch = new BytesRef(); - BytesRef nextScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + BytesRefBlock v = (BytesRefBlock) ref.block(); + int positionCount = v.getPositionCount(); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + BytesRef firstScratch = new BytesRef(); + BytesRef nextScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + BytesRef result = v.getBytesRef(first + idx, firstScratch); + builder.appendBytesRef(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - BytesRef result = v.getBytesRef(first + idx, firstScratch); - builder.appendBytesRef(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - BytesRefBlock v = (BytesRefBlock) fieldVal; - int positionCount = v.getPositionCount(); - BytesRefVector.Builder builder = BytesRefVector.newVectorBuilder(positionCount, driverContext.blockFactory()); - BytesRef firstScratch = new BytesRef(); - BytesRef nextScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - BytesRef result = v.getBytesRef(first + idx, firstScratch); - builder.appendBytesRef(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + BytesRefBlock v = (BytesRefBlock) ref.block(); + int positionCount = v.getPositionCount(); + BytesRefVector.Builder builder = BytesRefVector.newVectorBuilder(positionCount, driverContext.blockFactory()); + BytesRef firstScratch = new BytesRef(); + BytesRef nextScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + BytesRef result = v.getBytesRef(first + idx, firstScratch); + builder.appendBytesRef(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java index e40ff78d0d364..1336634dca683 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java @@ -9,7 +9,6 @@ import 
org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; @@ -34,93 +33,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + double value = v.getDouble(first); + for (int i = first + 1; i < end; i++) { + double next = v.getDouble(i); + value = MvMin.process(value, next); + } + double result = value; + builder.appendDouble(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - double value = v.getDouble(first); - for (int i = first + 1; i < end; i++) { - double next = v.getDouble(i); - value = MvMin.process(value, next); - } - double result = value; - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); 
} - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. */ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - double value = v.getDouble(first); - for (int i = first + 1; i < end; i++) { - double next = v.getDouble(i); - value = MvMin.process(value, next); + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + double value = v.getDouble(first); + for (int i = first + 1; i < end; i++) { + double next = v.getDouble(i); + value = MvMin.process(value, next); + } + double result = value; + builder.appendDouble(result); } - double result = value; - builder.appendDouble(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + double result = v.getDouble(first + idx); + builder.appendDouble(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - double result = v.getDouble(first + idx); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - double result = v.getDouble(first + idx); - builder.appendDouble(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + double result = v.getDouble(first + idx); + builder.appendDouble(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java index 9412930da53c5..ee48eca8e75c5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; 
import org.elasticsearch.compute.operator.EvalOperator; @@ -34,93 +33,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + int value = v.getInt(first); + for (int i = first + 1; i < end; i++) { + int next = v.getInt(i); + value = MvMin.process(value, next); + } + int result = value; + builder.appendInt(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - int value = v.getInt(first); - for (int i = first + 1; i < end; i++) { - int next = v.getInt(i); - value = MvMin.process(value, next); - } - int result = value; - builder.appendInt(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - int value = v.getInt(first); - for (int i = first + 1; i < end; i++) { - int next = v.getInt(i); - value = MvMin.process(value, next); + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + int value = v.getInt(first); + for (int i = first + 1; i < end; i++) { + int next = v.getInt(i); + value = MvMin.process(value, next); + } + int result = value; + builder.appendInt(result); } - int result = value; - builder.appendInt(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + int result = v.getInt(first + idx); + builder.appendInt(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - int result = v.getInt(first + idx); - builder.appendInt(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - int result = v.getInt(first + idx); - builder.appendInt(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + int result = v.getInt(first + idx); + builder.appendInt(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java index 1fac131f0de0c..0b1c6b7c3468d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; @@ -34,93 +33,101 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNullable(fieldVal); + public Block.Ref evalNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNullable(ref); } - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long value = v.getLong(first); + for (int i = first + 1; i < end; i++) { + long next = v.getLong(i); + value = MvMin.process(value, next); + } + long result = value; + builder.appendLong(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - long value = v.getLong(first); - for (int i = first + 1; i < end; i++) { - long next = v.getLong(i); - value = MvMin.process(value, next); - } - long result = value; - builder.appendLong(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - if (fieldVal.mvOrdering() == Block.MvOrdering.ASCENDING) { - return evalAscendingNotNullable(fieldVal); + public Block.Ref evalNotNullable(Block.Ref ref) { + if (ref.block().mvSortedAscending()) { + return evalAscendingNotNullable(ref); } - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - long value = v.getLong(first); - for (int i = first + 1; i < end; i++) { - long next = v.getLong(i); - value = MvMin.process(value, next); + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long value = v.getLong(first); + for (int i = first + 1; i < end; i++) { + long next = v.getLong(i); + value = MvMin.process(value, next); + } + long result = value; + builder.appendLong(result); } - long result = value; - builder.appendLong(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Block evalAscendingNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + private Block.Ref evalAscendingNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + long result = v.getLong(first + idx); + builder.appendLong(result); } - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - long result = v.getLong(first + idx); - builder.appendLong(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order. 
*/ - private Vector evalAscendingNotNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int idx = MvMin.ascendingIndex(valueCount); - long result = v.getLong(first + idx); - builder.appendLong(result); + private Block.Ref evalAscendingNotNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongVector.FixedBuilder builder = LongVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int idx = MvMin.ascendingIndex(valueCount); + long result = v.getLong(first + idx); + builder.appendLong(result); + } + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java index 207706d5dd9f9..1f142b02f1ff4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -35,49 +34,53 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + public Block.Ref evalNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + double value = v.getDouble(i); + MvSum.process(work, value); + } + double result = MvSum.finish(work); + builder.appendDouble(result); } - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - double value = v.getDouble(i); - MvSum.process(work, value); - } - double result = MvSum.finish(work); - builder.appendDouble(result); + return Block.Ref.floating(builder.build()); } - return builder.build(); } /** * Evaluate blocks containing at least one multivalued field. 
*/ @Override - public Vector evalNotNullable(Block fieldVal) { - DoubleBlock v = (DoubleBlock) fieldVal; - int positionCount = v.getPositionCount(); - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); - CompensatedSum work = new CompensatedSum(); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - for (int i = first; i < end; i++) { - double value = v.getDouble(i); - MvSum.process(work, value); + public Block.Ref evalNotNullable(Block.Ref ref) { + try (ref) { + DoubleBlock v = (DoubleBlock) ref.block(); + int positionCount = v.getPositionCount(); + DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(positionCount, driverContext.blockFactory()); + CompensatedSum work = new CompensatedSum(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + for (int i = first; i < end; i++) { + double value = v.getDouble(i); + MvSum.process(work, value); + } + double result = MvSum.finish(work); + builder.appendDouble(result); } - double result = MvSum.finish(work); - builder.appendDouble(result); + return Block.Ref.floating(builder.build().asBlock()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java index 98e5036c9be93..3968e5fd5a206 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java @@ -39,31 +39,33 @@ 
public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - IntBlock v = (IntBlock) fieldVal; - int positionCount = v.getPositionCount(); - IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; - } - try { - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - int value = v.getInt(first); - for (int i = first + 1; i < end; i++) { - int next = v.getInt(i); - value = MvSum.process(value, next); + public Block.Ref evalNullable(Block.Ref ref) { + try (ref) { + IntBlock v = (IntBlock) ref.block(); + int positionCount = v.getPositionCount(); + IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + try { + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + int value = v.getInt(first); + for (int i = first + 1; i < end; i++) { + int next = v.getInt(i); + value = MvSum.process(value, next); + } + int result = value; + builder.appendInt(result); + } catch (ArithmeticException e) { + warnings.registerException(e); + builder.appendNull(); } - int result = value; - builder.appendInt(result); - } catch (ArithmeticException e) { - warnings.registerException(e); - builder.appendNull(); } + return Block.Ref.floating(builder.build()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java index 9053a480a1355..8e5c2ea89974e 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java @@ -39,31 +39,33 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; - } - try { - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - long value = v.getLong(first); - for (int i = first + 1; i < end; i++) { - long next = v.getLong(i); - value = MvSum.process(value, next); + public Block.Ref evalNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + try { + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long value = v.getLong(first); + for (int i = first + 1; i < end; i++) { + long next = v.getLong(i); + value = MvSum.process(value, next); + } + long result = value; + builder.appendLong(result); + } catch (ArithmeticException e) { + warnings.registerException(e); + builder.appendNull(); } - long result = value; - builder.appendLong(result); - } catch (ArithmeticException e) { - warnings.registerException(e); - builder.appendNull(); } + return Block.Ref.floating(builder.build()); } - return builder.build(); } } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java index 0354ba77edc26..f9bd2748e8f39 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java @@ -39,31 +39,33 @@ public String name() { * Evaluate blocks containing at least one multivalued field. */ @Override - public Block evalNullable(Block fieldVal) { - LongBlock v = (LongBlock) fieldVal; - int positionCount = v.getPositionCount(); - LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); - for (int p = 0; p < positionCount; p++) { - int valueCount = v.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; - } - try { - int first = v.getFirstValueIndex(p); - int end = first + valueCount; - long value = v.getLong(first); - for (int i = first + 1; i < end; i++) { - long next = v.getLong(i); - value = MvSum.processUnsignedLong(value, next); + public Block.Ref evalNullable(Block.Ref ref) { + try (ref) { + LongBlock v = (LongBlock) ref.block(); + int positionCount = v.getPositionCount(); + LongBlock.Builder builder = LongBlock.newBlockBuilder(positionCount, driverContext.blockFactory()); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + try { + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long value = v.getLong(first); + for (int i = first + 1; i < end; i++) { + long next = v.getLong(i); + value = MvSum.processUnsignedLong(value, next); + } + long 
result = value; + builder.appendLong(result); + } catch (ArithmeticException e) { + warnings.registerException(e); + builder.appendNull(); } - long result = value; - builder.appendLong(result); - } catch (ArithmeticException e) { - warnings.registerException(e); - builder.appendNull(); } + return Block.Ref.floating(builder.build()); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index d1e45b8ca68c7..1afacc1b02978 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; /** @@ -36,23 +37,27 @@ public ConcatEvaluator(BreakingBytesRefBuilder scratch, EvalOperator.ExpressionE } @Override - public Block eval(Page page) { - BytesRefBlock[] valuesBlocks = new BytesRefBlock[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - Block block = values[i].eval(page); - if (block.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + Block.Ref[] valuesRefs = new Block.Ref[values.length]; + try (Releasable valuesRelease = Releasables.wrap(valuesRefs)) { + BytesRefBlock[] valuesBlocks = new BytesRefBlock[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesRefs[i] = values[i].eval(page); + Block block = valuesRefs[i].block(); + if 
(block.areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + valuesBlocks[i] = (BytesRefBlock) block; } - valuesBlocks[i] = (BytesRefBlock) block; - } - BytesRefVector[] valuesVectors = new BytesRefVector[values.length]; - for (int i = 0; i < valuesBlocks.length; i++) { - valuesVectors[i] = valuesBlocks[i].asVector(); - if (valuesVectors[i] == null) { - return eval(page.getPositionCount(), valuesBlocks); + BytesRefVector[] valuesVectors = new BytesRefVector[values.length]; + for (int i = 0; i < valuesBlocks.length; i++) { + valuesVectors[i] = valuesBlocks[i].asVector(); + if (valuesVectors[i] == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valuesBlocks)); + } } + return Block.Ref.floating(eval(page.getPositionCount(), valuesVectors).asBlock()); } - return eval(page.getPositionCount(), valuesVectors).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java index a03b169d0bb2f..ca59429695aff 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java @@ -36,26 +36,28 @@ public EndsWithEvaluator(EvalOperator.ExpressionEvaluator str, } @Override - public Block eval(Page page) { - Block strUncastBlock = str.eval(page); - if (strUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock strBlock = (BytesRefBlock) strUncastBlock; - Block suffixUncastBlock = suffix.eval(page); - if (suffixUncastBlock.areAllValuesNull()) { - return 
Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock suffixBlock = (BytesRefBlock) suffixUncastBlock; - BytesRefVector strVector = strBlock.asVector(); - if (strVector == null) { - return eval(page.getPositionCount(), strBlock, suffixBlock); - } - BytesRefVector suffixVector = suffixBlock.asVector(); - if (suffixVector == null) { - return eval(page.getPositionCount(), strBlock, suffixBlock); + public Block.Ref eval(Page page) { + try (Block.Ref strRef = str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + try (Block.Ref suffixRef = suffix.eval(page)) { + if (suffixRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock suffixBlock = (BytesRefBlock) suffixRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, suffixBlock)); + } + BytesRefVector suffixVector = suffixBlock.asVector(); + if (suffixVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, suffixBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), strVector, suffixVector).asBlock()); + } } - return eval(page.getPositionCount(), strVector, suffixVector).asBlock(); } public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock suffixBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java index 4e5e567192082..69374940d47fa 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java @@ -30,17 +30,18 @@ public LTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driver } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock valBlock = (BytesRefBlock) valUncastBlock; - BytesRefVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index ecdf34a86c4a3..7dd29f069a7e4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -44,26 +44,28 @@ public LeftEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, } @Override - public Block eval(Page page) { - Block strUncastBlock = str.eval(page); - if 
(strUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock strBlock = (BytesRefBlock) strUncastBlock; - Block lengthUncastBlock = length.eval(page); - if (lengthUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lengthBlock = (IntBlock) lengthUncastBlock; - BytesRefVector strVector = strBlock.asVector(); - if (strVector == null) { - return eval(page.getPositionCount(), strBlock, lengthBlock); - } - IntVector lengthVector = lengthBlock.asVector(); - if (lengthVector == null) { - return eval(page.getPositionCount(), strBlock, lengthBlock); + public Block.Ref eval(Page page) { + try (Block.Ref strRef = str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + try (Block.Ref lengthRef = length.eval(page)) { + if (lengthRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lengthBlock = (IntBlock) lengthRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, lengthBlock)); + } + IntVector lengthVector = lengthBlock.asVector(); + if (lengthVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, lengthBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), strVector, lengthVector).asBlock()); + } } - return eval(page.getPositionCount(), strVector, lengthVector).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock lengthBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index d18a6bdaa5606..b0528390344c2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -32,17 +32,18 @@ public LengthEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext drive } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock valBlock = (BytesRefBlock) valUncastBlock; - BytesRefVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public IntBlock eval(int positionCount, BytesRefBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java index e128b73c55ee4..8d8232d727a48 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java @@ -30,17 +30,18 @@ public RTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driver } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock valBlock = (BytesRefBlock) valUncastBlock; - BytesRefVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java new file mode 100644 index 0000000000000..141e9f8b476e1 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java @@ -0,0 +1,119 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.Override; +import java.lang.String; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Replace}. + * This class is generated. Do not edit it. + */ +public final class ReplaceConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator str; + + private final Pattern regex; + + private final EvalOperator.ExpressionEvaluator newStr; + + private final DriverContext driverContext; + + public ReplaceConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator str, + Pattern regex, EvalOperator.ExpressionEvaluator newStr, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.str = str; + this.regex = regex; + this.newStr = newStr; + this.driverContext = driverContext; + } + + @Override + public Block.Ref eval(Page page) { + try (Block.Ref strRef = str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + try (Block.Ref newStrRef = newStr.eval(page)) { + if 
(newStrRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock newStrBlock = (BytesRefBlock) newStrRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, newStrBlock)); + } + BytesRefVector newStrVector = newStrBlock.asVector(); + if (newStrVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, newStrBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), strVector, newStrVector)); + } + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock newStrBlock) { + BytesRefBlock.Builder result = BytesRefBlock.newBlockBuilder(positionCount); + BytesRef strScratch = new BytesRef(); + BytesRef newStrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + result.appendNull(); + continue position; + } + if (newStrBlock.isNull(p) || newStrBlock.getValueCount(p) != 1) { + result.appendNull(); + continue position; + } + try { + result.appendBytesRef(Replace.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), regex, newStrBlock.getBytesRef(newStrBlock.getFirstValueIndex(p), newStrScratch))); + } catch (PatternSyntaxException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + + public BytesRefBlock eval(int positionCount, BytesRefVector strVector, + BytesRefVector newStrVector) { + BytesRefBlock.Builder result = BytesRefBlock.newBlockBuilder(positionCount); + BytesRef strScratch = new BytesRef(); + BytesRef newStrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBytesRef(Replace.process(strVector.getBytesRef(p, strScratch), regex, newStrVector.getBytesRef(p, newStrScratch))); + } catch (PatternSyntaxException e) { 
+ warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + + @Override + public String toString() { + return "ReplaceConstantEvaluator[" + "str=" + str + ", regex=" + regex + ", newStr=" + newStr + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(str, newStr); + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java new file mode 100644 index 0000000000000..275a3ce0bc402 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java @@ -0,0 +1,136 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.Override; +import java.lang.String; +import java.util.regex.PatternSyntaxException; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Replace}. + * This class is generated. Do not edit it. 
+ */ +public final class ReplaceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator regex; + + private final EvalOperator.ExpressionEvaluator newStr; + + private final DriverContext driverContext; + + public ReplaceEvaluator(Source source, EvalOperator.ExpressionEvaluator str, + EvalOperator.ExpressionEvaluator regex, EvalOperator.ExpressionEvaluator newStr, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.str = str; + this.regex = regex; + this.newStr = newStr; + this.driverContext = driverContext; + } + + @Override + public Block.Ref eval(Page page) { + try (Block.Ref strRef = str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + try (Block.Ref regexRef = regex.eval(page)) { + if (regexRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock regexBlock = (BytesRefBlock) regexRef.block(); + try (Block.Ref newStrRef = newStr.eval(page)) { + if (newStrRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock newStrBlock = (BytesRefBlock) newStrRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, regexBlock, newStrBlock)); + } + BytesRefVector regexVector = regexBlock.asVector(); + if (regexVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, regexBlock, newStrBlock)); + } + BytesRefVector newStrVector = newStrBlock.asVector(); + if (newStrVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, regexBlock, newStrBlock)); + } + 
return Block.Ref.floating(eval(page.getPositionCount(), strVector, regexVector, newStrVector)); + } + } + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock regexBlock, + BytesRefBlock newStrBlock) { + BytesRefBlock.Builder result = BytesRefBlock.newBlockBuilder(positionCount); + BytesRef strScratch = new BytesRef(); + BytesRef regexScratch = new BytesRef(); + BytesRef newStrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + result.appendNull(); + continue position; + } + if (regexBlock.isNull(p) || regexBlock.getValueCount(p) != 1) { + result.appendNull(); + continue position; + } + if (newStrBlock.isNull(p) || newStrBlock.getValueCount(p) != 1) { + result.appendNull(); + continue position; + } + try { + result.appendBytesRef(Replace.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), regexBlock.getBytesRef(regexBlock.getFirstValueIndex(p), regexScratch), newStrBlock.getBytesRef(newStrBlock.getFirstValueIndex(p), newStrScratch))); + } catch (PatternSyntaxException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + + public BytesRefBlock eval(int positionCount, BytesRefVector strVector, BytesRefVector regexVector, + BytesRefVector newStrVector) { + BytesRefBlock.Builder result = BytesRefBlock.newBlockBuilder(positionCount); + BytesRef strScratch = new BytesRef(); + BytesRef regexScratch = new BytesRef(); + BytesRef newStrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBytesRef(Replace.process(strVector.getBytesRef(p, strScratch), regexVector.getBytesRef(p, regexScratch), newStrVector.getBytesRef(p, newStrScratch))); + } catch (PatternSyntaxException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + + @Override + public String toString() { + return 
"ReplaceEvaluator[" + "str=" + str + ", regex=" + regex + ", newStr=" + newStr + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(str, regex, newStr); + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java index 691c6dece4d2f..c3c9ca6b6772b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java @@ -44,26 +44,28 @@ public RightEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, } @Override - public Block eval(Page page) { - Block strUncastBlock = str.eval(page); - if (strUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock strBlock = (BytesRefBlock) strUncastBlock; - Block lengthUncastBlock = length.eval(page); - if (lengthUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lengthBlock = (IntBlock) lengthUncastBlock; - BytesRefVector strVector = strBlock.asVector(); - if (strVector == null) { - return eval(page.getPositionCount(), strBlock, lengthBlock); - } - IntVector lengthVector = lengthBlock.asVector(); - if (lengthVector == null) { - return eval(page.getPositionCount(), strBlock, lengthBlock); + public Block.Ref eval(Page page) { + try (Block.Ref strRef = str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + try (Block.Ref lengthRef = length.eval(page)) { + if (lengthRef.block().areAllValuesNull()) { + return 
Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lengthBlock = (IntBlock) lengthRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, lengthBlock)); + } + IntVector lengthVector = lengthBlock.asVector(); + if (lengthVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, lengthBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), strVector, lengthVector).asBlock()); + } } - return eval(page.getPositionCount(), strVector, lengthVector).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock lengthBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java index 09bb5b626eec8..b8315ef5b291d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java @@ -37,17 +37,18 @@ public SplitSingleByteEvaluator(EvalOperator.ExpressionEvaluator str, byte delim } @Override - public Block eval(Page page) { - Block strUncastBlock = str.eval(page); - if (strUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock strBlock = (BytesRefBlock) strUncastBlock; - BytesRefVector strVector = strBlock.asVector(); - if (strVector == null) { - return eval(page.getPositionCount(), strBlock); + public Block.Ref eval(Page page) { + try (Block.Ref strRef = str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return 
Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), strVector)); } - return eval(page.getPositionCount(), strVector); } public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java index 09be20be5bb48..5389a65d7f5f2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java @@ -37,26 +37,28 @@ public SplitVariableEvaluator(EvalOperator.ExpressionEvaluator str, } @Override - public Block eval(Page page) { - Block strUncastBlock = str.eval(page); - if (strUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock strBlock = (BytesRefBlock) strUncastBlock; - Block delimUncastBlock = delim.eval(page); - if (delimUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock delimBlock = (BytesRefBlock) delimUncastBlock; - BytesRefVector strVector = strBlock.asVector(); - if (strVector == null) { - return eval(page.getPositionCount(), strBlock, delimBlock); - } - BytesRefVector delimVector = delimBlock.asVector(); - if (delimVector == null) { - return eval(page.getPositionCount(), strBlock, delimBlock); + public Block.Ref eval(Page page) { + try (Block.Ref strRef = 
str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + try (Block.Ref delimRef = delim.eval(page)) { + if (delimRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock delimBlock = (BytesRefBlock) delimRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, delimBlock)); + } + BytesRefVector delimVector = delimBlock.asVector(); + if (delimVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, delimBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), strVector, delimVector)); + } } - return eval(page.getPositionCount(), strVector, delimVector); } public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock delimBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java index 94a21bc188047..9a38e8e511e93 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java @@ -36,26 +36,28 @@ public StartsWithEvaluator(EvalOperator.ExpressionEvaluator str, } @Override - public Block eval(Page page) { - Block strUncastBlock = str.eval(page); - if (strUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock strBlock = (BytesRefBlock) strUncastBlock; - Block prefixUncastBlock = 
prefix.eval(page); - if (prefixUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock prefixBlock = (BytesRefBlock) prefixUncastBlock; - BytesRefVector strVector = strBlock.asVector(); - if (strVector == null) { - return eval(page.getPositionCount(), strBlock, prefixBlock); - } - BytesRefVector prefixVector = prefixBlock.asVector(); - if (prefixVector == null) { - return eval(page.getPositionCount(), strBlock, prefixBlock); + public Block.Ref eval(Page page) { + try (Block.Ref strRef = str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + try (Block.Ref prefixRef = prefix.eval(page)) { + if (prefixRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock prefixBlock = (BytesRefBlock) prefixRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, prefixBlock)); + } + BytesRefVector prefixVector = prefixBlock.asVector(); + if (prefixVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, prefixBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), strVector, prefixVector).asBlock()); + } } - return eval(page.getPositionCount(), strVector, prefixVector).asBlock(); } public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock prefixBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java index a3c0aac4f4ba5..68a8c48640069 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java @@ -40,35 +40,38 @@ public SubstringEvaluator(EvalOperator.ExpressionEvaluator str, } @Override - public Block eval(Page page) { - Block strUncastBlock = str.eval(page); - if (strUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock strBlock = (BytesRefBlock) strUncastBlock; - Block startUncastBlock = start.eval(page); - if (startUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock startBlock = (IntBlock) startUncastBlock; - Block lengthUncastBlock = length.eval(page); - if (lengthUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lengthBlock = (IntBlock) lengthUncastBlock; - BytesRefVector strVector = strBlock.asVector(); - if (strVector == null) { - return eval(page.getPositionCount(), strBlock, startBlock, lengthBlock); - } - IntVector startVector = startBlock.asVector(); - if (startVector == null) { - return eval(page.getPositionCount(), strBlock, startBlock, lengthBlock); - } - IntVector lengthVector = lengthBlock.asVector(); - if (lengthVector == null) { - return eval(page.getPositionCount(), strBlock, startBlock, lengthBlock); + public Block.Ref eval(Page page) { + try (Block.Ref strRef = str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + try (Block.Ref startRef = start.eval(page)) { + if (startRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock startBlock = (IntBlock) startRef.block(); + try 
(Block.Ref lengthRef = length.eval(page)) { + if (lengthRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lengthBlock = (IntBlock) lengthRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, startBlock, lengthBlock)); + } + IntVector startVector = startBlock.asVector(); + if (startVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, startBlock, lengthBlock)); + } + IntVector lengthVector = lengthBlock.asVector(); + if (lengthVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, startBlock, lengthBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), strVector, startVector, lengthVector).asBlock()); + } + } } - return eval(page.getPositionCount(), strVector, startVector, lengthVector).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock startBlock, diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java index 199d1779fc250..1cdb3046807a5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java @@ -36,26 +36,28 @@ public SubstringNoLengthEvaluator(EvalOperator.ExpressionEvaluator str, } @Override - public Block eval(Page page) { - Block strUncastBlock = str.eval(page); - if (strUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock strBlock = 
(BytesRefBlock) strUncastBlock; - Block startUncastBlock = start.eval(page); - if (startUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock startBlock = (IntBlock) startUncastBlock; - BytesRefVector strVector = strBlock.asVector(); - if (strVector == null) { - return eval(page.getPositionCount(), strBlock, startBlock); - } - IntVector startVector = startBlock.asVector(); - if (startVector == null) { - return eval(page.getPositionCount(), strBlock, startBlock); + public Block.Ref eval(Page page) { + try (Block.Ref strRef = str.eval(page)) { + if (strRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); + try (Block.Ref startRef = start.eval(page)) { + if (startRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock startBlock = (IntBlock) startRef.block(); + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, startBlock)); + } + IntVector startVector = startBlock.asVector(); + if (startVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), strBlock, startBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), strVector, startVector).asBlock()); + } } - return eval(page.getPositionCount(), strVector, startVector).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock startBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java index 765d622cb2597..3d809e986db28 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java @@ -30,17 +30,18 @@ public TrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverC } @Override - public Block eval(Page page) { - Block valUncastBlock = val.eval(page); - if (valUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock valBlock = (BytesRefBlock) valUncastBlock; - BytesRefVector valVector = valBlock.asVector(); - if (valVector == null) { - return eval(page.getPositionCount(), valBlock); + public Block.Ref eval(Page page) { + try (Block.Ref valRef = val.eval(page)) { + if (valRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), valBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), valVector).asBlock()); } - return eval(page.getPositionCount(), valVector).asBlock(); } public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java index 801116d5f181e..7963282934b3c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java 
@@ -41,17 +41,18 @@ public AddDatetimesEvaluator(Source source, EvalOperator.ExpressionEvaluator dat } @Override - public Block eval(Page page) { - Block datetimeUncastBlock = datetime.eval(page); - if (datetimeUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock datetimeBlock = (LongBlock) datetimeUncastBlock; - LongVector datetimeVector = datetimeBlock.asVector(); - if (datetimeVector == null) { - return eval(page.getPositionCount(), datetimeBlock); + public Block.Ref eval(Page page) { + try (Block.Ref datetimeRef = datetime.eval(page)) { + if (datetimeRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock datetimeBlock = (LongBlock) datetimeRef.block(); + LongVector datetimeVector = datetimeBlock.asVector(); + if (datetimeVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), datetimeBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), datetimeVector)); } - return eval(page.getPositionCount(), datetimeVector); } public LongBlock eval(int positionCount, LongBlock datetimeBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index be11848f7599a..3d09e44283bd6 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -33,26 +33,28 @@ public AddDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) 
{ - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock = (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java index 4c2e1a53221b9..e38c31e75a281 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java @@ -39,26 +39,28 @@ public AddIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), 
lhsVector, rhsVector); } public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java index eeb2ecc5b5c26..f2baf465d9c24 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java @@ -39,26 +39,28 @@ public AddLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) 
rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java index 2684949674da5..7b1ac81f76d65 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java @@ -39,26 +39,28 @@ public AddUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return 
eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java index 356b6eba09b7a..cc79edcdbbbc8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java @@ -33,26 +33,28 @@ public DivDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock 
= (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java index 3bfb8b6ca26c9..93810920f715c 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java @@ -39,26 +39,28 @@ public DivIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), 
lhsVector, rhsVector); } public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java index 506fda9d33345..efea6612d21de 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java @@ -39,26 +39,28 @@ public DivLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) 
rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java index 6500d7222671a..ad3492bfefb74 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java @@ -39,26 +39,28 @@ public DivUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return 
eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java index 0634573da6439..f9c2fd4eaf051 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java @@ -33,26 +33,28 @@ public ModDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock 
= (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java index 55fa8ceea09bf..be2ce2242b35c 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java @@ -39,26 +39,28 @@ public ModIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), 
lhsVector, rhsVector); } public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java index a1ad1d0c92c75..3b83d5182b4ca 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java @@ -39,26 +39,28 @@ public ModLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) 
rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java index c7462ae237eff..9564a10a1f6ed 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java @@ -39,26 +39,28 @@ public ModUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return 
eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java index cded768003656..2a03cee47271e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java @@ -33,26 +33,28 @@ public MulDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock 
= (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java index 24be9244a5710..4932fa7129092 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java @@ -39,26 +39,28 @@ public MulIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), 
lhsVector, rhsVector); } public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java index b8c1e1048e1e6..53639c766a6d2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java @@ -39,26 +39,28 @@ public MulLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) 
rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java index e427a89cab197..da65e507da7cd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java @@ -39,26 +39,28 @@ public MulUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return 
eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java index 6bc8eb17d76c0..cf820059df7e8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java @@ -29,17 +29,18 @@ public NegDoublesEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext dri } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - 
DoubleBlock vBlock = (DoubleBlock) vUncastBlock; - DoubleVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if (vRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock vBlock = (DoubleBlock) vRef.block(); + DoubleVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), vVector).asBlock()); } - return eval(page.getPositionCount(), vVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java index b485d1952ccaa..a2f7f22da5db1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java @@ -36,17 +36,18 @@ public NegIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator v, } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock vBlock = (IntBlock) vUncastBlock; - IntVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if (vRef.block().areAllValuesNull()) { + return 
Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock vBlock = (IntBlock) vRef.block(); + IntVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), vVector)); } - return eval(page.getPositionCount(), vVector); } public IntBlock eval(int positionCount, IntBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java index 704255f81b46a..958308f5261c2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java @@ -36,17 +36,18 @@ public NegLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator v, } @Override - public Block eval(Page page) { - Block vUncastBlock = v.eval(page); - if (vUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock vBlock = (LongBlock) vUncastBlock; - LongVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); + public Block.Ref eval(Page page) { + try (Block.Ref vRef = v.eval(page)) { + if (vRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock vBlock = (LongBlock) vRef.block(); + LongVector vVector = vBlock.asVector(); + if (vVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), vBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), vVector)); } - return eval(page.getPositionCount(), 
vVector); } public LongBlock eval(int positionCount, LongBlock vBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java index 6841b2193f99e..fdc3ded7d8da2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java @@ -41,17 +41,18 @@ public SubDatetimesEvaluator(Source source, EvalOperator.ExpressionEvaluator dat } @Override - public Block eval(Page page) { - Block datetimeUncastBlock = datetime.eval(page); - if (datetimeUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock datetimeBlock = (LongBlock) datetimeUncastBlock; - LongVector datetimeVector = datetimeBlock.asVector(); - if (datetimeVector == null) { - return eval(page.getPositionCount(), datetimeBlock); + public Block.Ref eval(Page page) { + try (Block.Ref datetimeRef = datetime.eval(page)) { + if (datetimeRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock datetimeBlock = (LongBlock) datetimeRef.block(); + LongVector datetimeVector = datetimeBlock.asVector(); + if (datetimeVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), datetimeBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), datetimeVector)); } - return eval(page.getPositionCount(), datetimeVector); } public LongBlock eval(int positionCount, LongBlock datetimeBlock) { diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java index 8adfb0f12d300..5b11d4a627cf9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java @@ -33,26 +33,28 @@ public SubDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock lhsBlock = (DoubleBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - DoubleBlock rhsBlock = (DoubleBlock) rhsUncastBlock; - DoubleVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - DoubleVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); + DoubleVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return 
Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + DoubleVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector).asBlock()); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java index 49b8a42bb6094..0f17f36eaabf3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java @@ -39,26 +39,28 @@ public SubIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock lhsBlock = (IntBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - IntBlock rhsBlock = (IntBlock) rhsUncastBlock; - IntVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - IntVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if 
(lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock lhsBlock = (IntBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + IntBlock rhsBlock = (IntBlock) rhsRef.block(); + IntVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + IntVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java index f4a7d218252a5..d8ac1c94c99a2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java @@ -39,26 +39,28 @@ public SubLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return 
Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java index 73ac871746356..441a375480768 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java @@ -39,26 +39,28 @@ public SubUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator } @Override - public Block eval(Page page) { - Block lhsUncastBlock = lhs.eval(page); - if (lhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock lhsBlock = (LongBlock) lhsUncastBlock; - Block rhsUncastBlock = rhs.eval(page); - if (rhsUncastBlock.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - LongBlock rhsBlock = (LongBlock) rhsUncastBlock; - LongVector lhsVector = lhsBlock.asVector(); - if (lhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); - } - LongVector rhsVector = rhsBlock.asVector(); - if (rhsVector == null) { - return eval(page.getPositionCount(), lhsBlock, rhsBlock); + public Block.Ref eval(Page page) { + try (Block.Ref lhsRef = lhs.eval(page)) { + if (lhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock lhsBlock = (LongBlock) lhsRef.block(); + try (Block.Ref rhsRef = rhs.eval(page)) { + if (rhsRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + LongBlock rhsBlock = (LongBlock) rhsRef.block(); + LongVector lhsVector = lhsBlock.asVector(); + if (lhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + LongVector rhsVector = rhsBlock.asVector(); + if (rhsVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), lhsBlock, rhsBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), lhsVector, rhsVector)); + } } - return eval(page.getPositionCount(), lhsVector, rhsVector); } public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index dc5f1ced16c11..e3777c8e85e36 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -86,16 +86,15 @@ record BooleanLogicExpressionEvaluator(BinaryLogic bl, ExpressionEvaluator leftE implements ExpressionEvaluator { @Override - public Block eval(Page page) { - Block lhs = leftEval.eval(page); - Block rhs = rightEval.eval(page); - - Vector lhsVector = lhs.asVector(); - Vector rhsVector = rhs.asVector(); - if (lhsVector != null && rhsVector != null) { - return eval((BooleanVector) lhsVector, (BooleanVector) rhsVector); + public Block.Ref eval(Page page) { + try (Block.Ref lhs = leftEval.eval(page); Block.Ref rhs = rightEval.eval(page)) { + Vector lhsVector = lhs.block().asVector(); + Vector rhsVector = rhs.block().asVector(); + if (lhsVector != null && rhsVector != null) { + return Block.Ref.floating(eval((BooleanVector) lhsVector, (BooleanVector) rhsVector)); + } + return Block.Ref.floating(eval(lhs.block(), rhs.block())); } - return eval(lhs, rhs); } /** @@ -164,8 +163,8 @@ static class Attributes extends ExpressionMapper { public ExpressionEvaluator.Factory map(Attribute attr, Layout layout) { record Attribute(int channel) implements ExpressionEvaluator { @Override - public Block eval(Page page) { - return page.getBlock(channel); + public Block.Ref eval(Page page) { + return new Block.Ref(page.getBlock(channel), page); } @Override @@ -182,8 +181,8 @@ static class Literals extends ExpressionMapper { public ExpressionEvaluator.Factory map(Literal lit, Layout layout) { record LiteralsEvaluator(IntFunction block) implements ExpressionEvaluator { @Override - public Block eval(Page page) { - return block.apply(page.getPositionCount()); + public 
Block.Ref eval(Page page) { + return Block.Ref.floating(block.apply(page.getPositionCount())); } @Override @@ -234,16 +233,17 @@ public ExpressionEvaluator.Factory map(IsNull isNull, Layout layout) { record IsNullEvaluator(EvalOperator.ExpressionEvaluator field) implements EvalOperator.ExpressionEvaluator { @Override - public Block eval(Page page) { - Block fieldBlock = field.eval(page); - if (fieldBlock.asVector() != null) { - return BooleanBlock.newConstantBlockWith(false, page.getPositionCount()); - } - boolean[] result = new boolean[page.getPositionCount()]; - for (int p = 0; p < page.getPositionCount(); p++) { - result[p] = fieldBlock.isNull(p); + public Block.Ref eval(Page page) { + try (Block.Ref fieldBlock = field.eval(page)) { + if (fieldBlock.block().asVector() != null) { + return Block.Ref.floating(BooleanBlock.newConstantBlockWith(false, page.getPositionCount())); + } + boolean[] result = new boolean[page.getPositionCount()]; + for (int p = 0; p < page.getPositionCount(); p++) { + result[p] = fieldBlock.block().isNull(p); + } + return Block.Ref.floating(new BooleanArrayVector(result, result.length).asBlock()); } - return new BooleanArrayVector(result, result.length).asBlock(); } @Override @@ -263,16 +263,17 @@ public ExpressionEvaluator.Factory map(IsNotNull isNotNull, Layout layout) { record IsNotNullEvaluator(EvalOperator.ExpressionEvaluator field) implements EvalOperator.ExpressionEvaluator { @Override - public Block eval(Page page) { - Block fieldBlock = field.eval(page); - if (fieldBlock.asVector() != null) { - return BooleanBlock.newConstantBlockWith(true, page.getPositionCount()); - } - boolean[] result = new boolean[page.getPositionCount()]; - for (int p = 0; p < page.getPositionCount(); p++) { - result[p] = fieldBlock.isNull(p) == false; + public Block.Ref eval(Page page) { + try (Block.Ref fieldBlock = field.eval(page)) { + if (fieldBlock.block().asVector() != null) { + return Block.Ref.floating(BooleanBlock.newConstantBlockWith(true, 
page.getPositionCount())); + } + boolean[] result = new boolean[page.getPositionCount()]; + for (int p = 0; p < page.getPositionCount(); p++) { + result[p] = fieldBlock.block().isNull(p) == false; + } + return Block.Ref.floating(new BooleanArrayVector(result, result.length).asBlock()); } - return new BooleanArrayVector(result, result.length).asBlock(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java index b0fa12ec255bb..6d5932688558a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java @@ -37,8 +37,8 @@ public interface EvaluatorMapper { default Object fold() { return toJavaObject(toEvaluator(e -> driverContext -> new ExpressionEvaluator() { @Override - public Block eval(Page page) { - return fromArrayRow(e.fold())[0]; + public Block.Ref eval(Page page) { + return Block.Ref.floating(fromArrayRow(e.fold())[0]); } @Override @@ -49,6 +49,6 @@ public void close() {} // TODO maybe this should have a small fixed limit? 
new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE) ) - ).eval(new Page(1)), 0); + ).eval(new Page(1)).block(), 0); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java index 8f6ec95123549..2be60292dff6f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java @@ -47,7 +47,7 @@ public ExpressionEvaluator.Factory map(In in, Layout layout) { record InExpressionEvaluator(List listEvaluators) implements EvalOperator.ExpressionEvaluator { @Override - public Block eval(Page page) { + public Block.Ref eval(Page page) { int positionCount = page.getPositionCount(); boolean[] values = new boolean[positionCount]; BitSet nulls = new BitSet(positionCount); // at least one evaluation resulted in NULL on a row @@ -55,21 +55,22 @@ public Block eval(Page page) { for (int i = 0; i < listEvaluators().size(); i++) { var evaluator = listEvaluators.get(i); - Block block = evaluator.eval(page); + try (Block.Ref ref = evaluator.eval(page)) { - Vector vector = block.asVector(); - if (vector != null) { - updateValues((BooleanVector) vector, values); - } else { - if (block.areAllValuesNull()) { - nullInValues = true; + Vector vector = ref.block().asVector(); + if (vector != null) { + updateValues((BooleanVector) vector, values); } else { - updateValues((BooleanBlock) block, values, nulls); + if (ref.block().areAllValuesNull()) { + nullInValues = true; + } else { + updateValues((BooleanBlock) ref.block(), values, nulls); + } } } } - return evalWithNulls(values, nulls, nullInValues); + return Block.Ref.floating(evalWithNulls(values, nulls, nullInValues)); } 
private static void updateValues(BooleanVector vector, boolean[] values) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3a675f1024387..e41d0f316e5f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -75,6 +75,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; @@ -146,6 +147,7 @@ private FunctionDefinition[][] functions() { def(RTrim.class, RTrim::new, "rtrim"), def(Trim.class, Trim::new, "trim"), def(Left.class, Left::new, "left"), + def(Replace.class, Replace::new, "replace"), def(Right.class, Right::new, "right"), def(StartsWith.class, StartsWith::new, "starts_with"), def(EndsWith.class, EndsWith::new, "ends_with") }, @@ -190,6 +192,10 @@ private FunctionDefinition[][] functions() { @Override protected String normalize(String name) { + return normalizeName(name); + } + + public static String normalizeName(String name) { return name.toLowerCase(Locale.ROOT); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 
4abb1fa5349fe..4dab12db0d5cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -188,7 +188,7 @@ private record CaseEvaluator(ElementType resultType, List co implements EvalOperator.ExpressionEvaluator { @Override - public Block eval(Page page) { + public Block.Ref eval(Page page) { /* * We have to evaluate lazily so any errors or warnings that would be * produced by the right hand side are avoided. And so if anything @@ -206,23 +206,28 @@ public Block eval(Page page) { IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) ); for (ConditionEvaluator condition : conditions) { - Block e = condition.condition.eval(limited); - if (e.areAllValuesNull()) { - continue; + try (Block.Ref conditionRef = condition.condition.eval(limited)) { + if (conditionRef.block().areAllValuesNull()) { + continue; + } + BooleanBlock b = (BooleanBlock) conditionRef.block(); + if (b.isNull(0)) { + continue; + } + if (false == b.getBoolean(b.getFirstValueIndex(0))) { + continue; + } + try (Block.Ref valueRef = condition.value.eval(limited)) { + result.copyFrom(valueRef.block(), 0, 1); + continue position; + } } - BooleanBlock b = (BooleanBlock) e; - if (b.isNull(0)) { - continue; - } - if (false == b.getBoolean(b.getFirstValueIndex(0))) { - continue; - } - result.copyFrom(condition.value.eval(limited), 0, 1); - continue position; } - result.copyFrom(elseVal.eval(limited), 0, 1); + try (Block.Ref elseRef = elseVal.eval(limited)) { + result.copyFrom(elseRef.block(), 0, 1); + } } - return result.build(); + return Block.Ref.floating(result.build()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 1af66cb4f50b0..d4c8727529eca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -9,9 +9,11 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasables; @@ -25,7 +27,6 @@ import java.util.Locale; import java.util.Map; -import java.util.function.BiFunction; import java.util.function.Function; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -48,7 +49,7 @@ protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fiel if (evaluator == null) { throw EsqlIllegalArgumentException.illegalDataType(sourceType); } - return dvrCtx -> evaluator.apply(fieldEval.get(dvrCtx), source()); + return dvrCtx -> evaluator.apply(fieldEval.get(dvrCtx), source(), dvrCtx); } @Override @@ -65,7 +66,7 @@ protected final TypeResolution resolveType() { ); } - protected abstract Map> evaluators(); + protected abstract Map> evaluators(); @Override public final Object fold() { @@ -101,13 +102,14 @@ protected AbstractEvaluator(EvalOperator.ExpressionEvaluator field, Source sourc */ protected abstract Block evalVector(Vector v); - public Block eval(Page page) { - Block block = fieldEvaluator.eval(page); - if (block.areAllValuesNull()) { - return 
Block.constantNullBlock(page.getPositionCount()); + public Block.Ref eval(Page page) { + try (Block.Ref ref = fieldEvaluator.eval(page)) { + if (ref.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); + } + Vector vector = ref.block().asVector(); + return Block.Ref.floating(vector == null ? evalBlock(ref.block()) : evalVector(vector)); } - Vector vector = block.asVector(); - return vector == null ? evalBlock(block) : evalVector(vector); } protected final void registerException(Exception exception) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 3ec6492ef0d8c..701b3fa67732c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -18,7 +20,6 @@ import java.math.BigInteger; import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -30,10 +31,11 @@ public class ToBoolean extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( 
BOOLEAN, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, KEYWORD, ToBooleanFromStringEvaluator::new, DOUBLE, @@ -51,7 +53,9 @@ public ToBoolean(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index 5049a80d075f9..eb23e460b88ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import org.elasticsearch.xpack.ql.expression.Expression; @@ -18,7 +20,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -29,12 +30,13 @@ public class ToDatetime extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( DATETIME, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, LONG, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, KEYWORD, ToDatetimeFromStringEvaluator::new, DOUBLE, 
@@ -50,7 +52,9 @@ public ToDatetime(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index ec59446989bca..299e8cfe8643e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; @@ -29,16 +30,29 @@ * to degrees. 
*/ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMapper { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( DOUBLE, ToDegreesEvaluator::new, INTEGER, - (field, source) -> new ToDegreesEvaluator(new ToDoubleFromIntEvaluator(field, source), source), + (field, source, driverContext) -> new ToDegreesEvaluator( + new ToDoubleFromIntEvaluator(field, source, driverContext), + source, + driverContext + ), LONG, - (field, source) -> new ToDegreesEvaluator(new ToDoubleFromLongEvaluator(field, source), source), + (field, source, driverContext) -> new ToDegreesEvaluator( + new ToDoubleFromLongEvaluator(field, source, driverContext), + source, + driverContext + ), UNSIGNED_LONG, - (field, source) -> new ToDegreesEvaluator(new ToDoubleFromUnsignedLongEvaluator(field, source), source) + (field, source, driverContext) -> new ToDegreesEvaluator( + new ToDoubleFromUnsignedLongEvaluator(field, source, driverContext), + source, + driverContext + ) ); public ToDegrees(Source source, Expression field) { @@ -46,7 +60,9 @@ public ToDegrees(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index dc8527637c7a3..690f7a66cbece 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import 
org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; @@ -30,10 +31,11 @@ public class ToDouble extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( DOUBLE, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, BOOLEAN, ToDoubleFromBooleanEvaluator::new, DATETIME, @@ -53,7 +55,9 @@ public ToDouble(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index 0931033758dbb..d55b9d23975e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import 
java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; @@ -25,15 +26,23 @@ public class ToIP extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of(IP, (fieldEval, source) -> fieldEval, KEYWORD, ToIPFromStringEvaluator::new); + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( + IP, + (fieldEval, source, driverContext) -> fieldEval, + KEYWORD, + ToIPFromStringEvaluator::new + ); public ToIP(Source source, Expression field) { super(source, field); } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index 1d26c4724a423..0fcf62ed3864a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; @@ -31,10 +32,11 @@ 
public class ToInteger extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( INTEGER, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, BOOLEAN, ToIntegerFromBooleanEvaluator::new, DATETIME, @@ -54,7 +56,9 @@ public ToInteger(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index ffb31a77cb1fc..8e50dd8540ffd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong; @@ -32,12 +33,13 @@ public class ToLong extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( LONG, - (fieldEval, source) -> fieldEval, + (fieldEval, source, 
driverContext) -> fieldEval, DATETIME, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, BOOLEAN, ToLongFromBooleanEvaluator::new, KEYWORD, @@ -55,7 +57,9 @@ public ToLong(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index 8064303e204d5..8bb5180e09752 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; @@ -29,16 +30,29 @@ * to radians. 
*/ public class ToRadians extends AbstractConvertFunction implements EvaluatorMapper { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( DOUBLE, ToRadiansEvaluator::new, INTEGER, - (field, source) -> new ToRadiansEvaluator(new ToDoubleFromIntEvaluator(field, source), source), + (field, source, driverContext) -> new ToRadiansEvaluator( + new ToDoubleFromIntEvaluator(field, source, driverContext), + source, + driverContext + ), LONG, - (field, source) -> new ToRadiansEvaluator(new ToDoubleFromLongEvaluator(field, source), source), + (field, source, driverContext) -> new ToRadiansEvaluator( + new ToDoubleFromLongEvaluator(field, source, driverContext), + source, + driverContext + ), UNSIGNED_LONG, - (field, source) -> new ToRadiansEvaluator(new ToDoubleFromUnsignedLongEvaluator(field, source), source) + (field, source, driverContext) -> new ToRadiansEvaluator( + new ToDoubleFromUnsignedLongEvaluator(field, source, driverContext), + source, + driverContext + ) ); public ToRadians(Source source, Expression field) { @@ -46,7 +60,9 @@ public ToRadians(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 428c1f32b1fc7..af895ab7c56cf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import 
org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -21,7 +23,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; @@ -38,10 +39,11 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( KEYWORD, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, BOOLEAN, ToStringFromBooleanEvaluator::new, DATETIME, @@ -55,7 +57,7 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper INTEGER, ToStringFromIntEvaluator::new, TEXT, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, VERSION, ToStringFromVersionEvaluator::new, UNSIGNED_LONG, @@ -67,7 +69,9 @@ public ToString(Source source, @Named("v") Expression v) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 83deed6b18490..396aa03f39dc6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import 
org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -17,7 +19,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; @@ -33,10 +34,11 @@ public class ToUnsignedLong extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.of( + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.of( UNSIGNED_LONG, - (fieldEval, source) -> fieldEval, + (fieldEval, source, driverContext) -> fieldEval, DATETIME, ToUnsignedLongFromLongEvaluator::new, BOOLEAN, @@ -56,7 +58,9 @@ public ToUnsignedLong(Source source, Expression field) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index 0051bee45eead..559e2fc4f89fa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -19,7 +21,6 @@ import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; @@ -27,9 +28,10 @@ public class ToVersion extends AbstractConvertFunction { - private static final Map> EVALUATORS = - Map.ofEntries( - Map.entry(VERSION, (fieldEval, source) -> fieldEval), + private static final Map< + DataType, + TriFunction> EVALUATORS = Map.ofEntries( + Map.entry(VERSION, (fieldEval, source, driverContext) -> fieldEval), Map.entry(KEYWORD, ToVersionFromStringEvaluator::new), Map.entry(TEXT, ToVersionFromStringEvaluator::new) ); @@ -39,7 +41,9 @@ public ToVersion(Source source, @Named("v") Expression v) { } @Override - protected Map> evaluators() { + protected + Map> + evaluators() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java index 3291ab01f56a9..78344f0ae51b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasables; @@ -66,29 +65,29 @@ protected 
AbstractEvaluator(EvalOperator.ExpressionEvaluator field) { * valued fields and no null values. Building an array vector directly is * generally faster than building it via a {@link Block.Builder}. */ - protected abstract Vector evalNotNullable(Block fieldVal); + protected abstract Block.Ref evalNotNullable(Block.Ref fieldVal); /** * Called to evaluate single valued fields when the target block does not * have null values. */ - protected Vector evalSingleValuedNotNullable(Block fieldVal) { - return fieldVal.asVector(); + protected Block.Ref evalSingleValuedNotNullable(Block.Ref fieldRef) { + return fieldRef; } @Override - public final Block eval(Page page) { - Block fieldVal = field.eval(page); - if (fieldVal.mayHaveMultivaluedFields() == false) { - if (fieldVal.mayHaveNulls()) { - return evalSingleValuedNullable(fieldVal); + public final Block.Ref eval(Page page) { + Block.Ref ref = field.eval(page); + if (ref.block().mayHaveMultivaluedFields() == false) { + if (ref.block().mayHaveNulls()) { + return evalSingleValuedNullable(ref); } - return evalSingleValuedNotNullable(fieldVal).asBlock(); + return evalSingleValuedNotNullable(ref); } - if (fieldVal.mayHaveNulls()) { - return evalNullable(fieldVal); + if (ref.block().mayHaveNulls()) { + return evalNullable(ref); } - return evalNotNullable(fieldVal).asBlock(); + return evalNotNullable(ref); } } @@ -107,20 +106,20 @@ protected AbstractNullableEvaluator(EvalOperator.ExpressionEvaluator field) { /** * Called when evaluating a {@link Block} that contains null values. */ - protected abstract Block evalNullable(Block fieldVal); + protected abstract Block.Ref evalNullable(Block.Ref fieldVal); /** * Called to evaluate single valued fields when the target block has null * values. 
*/ - protected Block evalSingleValuedNullable(Block fieldVal) { - return fieldVal; + protected Block.Ref evalSingleValuedNullable(Block.Ref fieldRef) { + return fieldRef; } @Override - public Block eval(Page page) { - Block fieldVal = field.eval(page); - return fieldVal.mayHaveMultivaluedFields() ? evalNullable(fieldVal) : evalSingleValuedNullable(fieldVal); + public Block.Ref eval(Page page) { + Block.Ref fieldRef = field.eval(page); + return fieldRef.block().mayHaveMultivaluedFields() ? evalNullable(fieldRef) : evalSingleValuedNullable(fieldRef); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java index 43b305ab1b07a..237cd3a3205b1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java @@ -96,46 +96,46 @@ private class MvConcatEvaluator implements EvalOperator.ExpressionEvaluator { } @Override - public final Block eval(Page page) { - Block fieldUncast = field.eval(page); - Block delimUncast = delim.eval(page); - if (fieldUncast.areAllValuesNull() || delimUncast.areAllValuesNull()) { - return Block.constantNullBlock(page.getPositionCount()); - } - BytesRefBlock fieldVal = (BytesRefBlock) fieldUncast; - BytesRefBlock delimVal = (BytesRefBlock) delimUncast; - - int positionCount = page.getPositionCount(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); - BytesRefBuilder work = new BytesRefBuilder(); // TODO BreakingBytesRefBuilder so we don't blow past circuit breakers - BytesRef fieldScratch = new BytesRef(); - BytesRef delimScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - int fieldValueCount = fieldVal.getValueCount(p); - 
if (fieldValueCount == 0) { - builder.appendNull(); - continue; - } - if (delimVal.getValueCount(p) != 1) { - builder.appendNull(); - continue; - } - int first = fieldVal.getFirstValueIndex(p); - if (fieldValueCount == 1) { - builder.appendBytesRef(fieldVal.getBytesRef(first, fieldScratch)); - continue; + public final Block.Ref eval(Page page) { + try (Block.Ref fieldRef = field.eval(page); Block.Ref delimRef = delim.eval(page)) { + if (fieldRef.block().areAllValuesNull() || delimRef.block().areAllValuesNull()) { + return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount())); } - int end = first + fieldValueCount; - BytesRef delim = delimVal.getBytesRef(delimVal.getFirstValueIndex(p), delimScratch); - work.clear(); - work.append(fieldVal.getBytesRef(first, fieldScratch)); - for (int i = first + 1; i < end; i++) { - work.append(delim); - work.append(fieldVal.getBytesRef(i, fieldScratch)); + BytesRefBlock fieldVal = (BytesRefBlock) fieldRef.block(); + BytesRefBlock delimVal = (BytesRefBlock) delimRef.block(); + + int positionCount = page.getPositionCount(); + BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(positionCount); + BytesRefBuilder work = new BytesRefBuilder(); // TODO BreakingBytesRefBuilder so we don't blow past circuit breakers + BytesRef fieldScratch = new BytesRef(); + BytesRef delimScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int fieldValueCount = fieldVal.getValueCount(p); + if (fieldValueCount == 0) { + builder.appendNull(); + continue; + } + if (delimVal.getValueCount(p) != 1) { + builder.appendNull(); + continue; + } + int first = fieldVal.getFirstValueIndex(p); + if (fieldValueCount == 1) { + builder.appendBytesRef(fieldVal.getBytesRef(first, fieldScratch)); + continue; + } + int end = first + fieldValueCount; + BytesRef delim = delimVal.getBytesRef(delimVal.getFirstValueIndex(p), delimScratch); + work.clear(); + work.append(fieldVal.getBytesRef(first, fieldScratch)); + for (int i = first + 1; 
i < end; i++) { + work.append(delim); + work.append(fieldVal.getBytesRef(i, fieldScratch)); + } + builder.appendBytesRef(work.get()); } - builder.appendBytesRef(work.get()); + return Block.Ref.floating(builder.build()); } - return builder.build(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 431c3f568a1db..77541b3b56594 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -8,10 +8,10 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ConstantIntVector; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -69,36 +69,44 @@ protected String name() { } @Override - protected Block evalNullable(Block fieldVal) { - IntBlock.Builder builder = IntBlock.newBlockBuilder(fieldVal.getPositionCount()); - for (int p = 0; p < fieldVal.getPositionCount(); p++) { - int valueCount = fieldVal.getValueCount(p); - if (valueCount == 0) { - builder.appendNull(); - continue; + protected Block.Ref evalNullable(Block.Ref ref) { + try (ref; IntBlock.Builder builder = IntBlock.newBlockBuilder(ref.block().getPositionCount())) { + for (int p = 0; p < ref.block().getPositionCount(); p++) { + int 
valueCount = ref.block().getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + builder.appendInt(valueCount); } - builder.appendInt(valueCount); + return Block.Ref.floating(builder.build()); } - return builder.build(); } @Override - protected Vector evalNotNullable(Block fieldVal) { - int[] values = new int[fieldVal.getPositionCount()]; - for (int p = 0; p < fieldVal.getPositionCount(); p++) { - values[p] = fieldVal.getValueCount(p); + protected Block.Ref evalNotNullable(Block.Ref ref) { + try ( + ref; + IntVector.FixedBuilder builder = IntVector.newVectorFixedBuilder( + ref.block().getPositionCount(), + BlockFactory.getNonBreakingInstance() + ) + ) { + for (int p = 0; p < ref.block().getPositionCount(); p++) { + builder.appendInt(ref.block().getValueCount(p)); + } + return Block.Ref.floating(builder.build().asBlock()); } - return new IntArrayVector(values, values.length); } @Override - protected Block evalSingleValuedNullable(Block fieldVal) { + protected Block.Ref evalSingleValuedNullable(Block.Ref fieldVal) { return evalNullable(fieldVal); } @Override - protected Vector evalSingleValuedNotNullable(Block fieldVal) { - return new ConstantIntVector(1, fieldVal.getPositionCount()); + protected Block.Ref evalSingleValuedNotNullable(Block.Ref fieldVal) { + return Block.Ref.floating(new ConstantIntVector(1, fieldVal.block().getPositionCount()).asBlock()); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 410196f0785f3..da5dbd958755b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -129,7 +129,7 @@ private record CoalesceEvaluator(ElementType resultType, List 
page.getBlock(b).filter(positions)).toArray(Block[]::new) ); for (EvalOperator.ExpressionEvaluator eval : evaluators) { - Block e = eval.eval(limited); - if (false == e.isNull(0)) { - result.copyFrom(e, 0, 1); - continue position; + try (Block.Ref ref = eval.eval(limited)) { + if (false == ref.block().isNull(0)) { + result.copyFrom(ref.block(), 0, 1); + continue position; + } } } result.appendNull(); } - return result.build(); + return Block.Ref.floating(result.build()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index b03d9bdc409c4..1470c3ec1e5ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -61,8 +61,9 @@ *
  • * There are also methods annotated with {@link org.elasticsearch.compute.ann.Evaluator} * that contain the actual inner implementation of the function. Modify those to look right - * and click {@code Build->Recompile 'FunctionName.java'} in IntelliJ. This should generate - * an {@link org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator} implementation + * and click {@code Build->Recompile 'FunctionName.java'} in IntelliJ or run the + * {@code CsvTests} again. This should generate an + * {@link org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator} implementation * calling the method annotated with {@link org.elasticsearch.compute.ann.Evaluator}. Please commit the * generated evaluator before submitting your PR. *
  • diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java new file mode 100644 index 0000000000000..99d44b534ac26 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; +import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; + +public 
class Replace extends ScalarFunction implements EvaluatorMapper { + + private final Expression str; + private final Expression newStr; + private final Expression regex; + + public Replace(Source source, Expression str, Expression regex, Expression newStr) { + super(source, Arrays.asList(str, regex, newStr)); + this.str = str; + this.regex = regex; + this.newStr = newStr; + } + + @Override + public DataType dataType() { + return DataTypes.KEYWORD; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + TypeResolution resolution = isString(str, sourceText(), FIRST); + if (resolution.unresolved()) { + return resolution; + } + + resolution = isString(regex, sourceText(), SECOND); + if (resolution.unresolved()) { + return resolution; + } + + return isString(newStr, sourceText(), THIRD); + } + + @Override + public boolean foldable() { + return str.foldable() && regex.foldable() && newStr.foldable(); + } + + @Override + public Object fold() { + return EvaluatorMapper.super.fold(); + } + + @Evaluator(extraName = "Constant", warnExceptions = PatternSyntaxException.class) + static BytesRef process(BytesRef str, @Fixed Pattern regex, BytesRef newStr) { + if (str == null || regex == null || newStr == null) { + return null; + } + return new BytesRef(regex.matcher(str.utf8ToString()).replaceAll(newStr.utf8ToString())); + } + + @Evaluator(warnExceptions = PatternSyntaxException.class) + static BytesRef process(BytesRef str, BytesRef regex, BytesRef newStr) { + if (str == null) { + return null; + } + + if (regex == null || newStr == null) { + return str; + } + return new BytesRef(str.utf8ToString().replaceAll(regex.utf8ToString(), newStr.utf8ToString())); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Replace(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + } + + @Override + protected NodeInfo info() { + return 
NodeInfo.create(this, Replace::new, str, regex, newStr); + } + + @Override + public ScriptTemplate asScript() { + throw new UnsupportedOperationException("functions do not support scripting"); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var strEval = toEvaluator.apply(str); + var newStrEval = toEvaluator.apply(newStr); + + if (regex.foldable() && regex.dataType() == DataTypes.KEYWORD) { + Pattern regexPattern; + try { + regexPattern = Pattern.compile(((BytesRef) regex.fold()).utf8ToString()); + } catch (PatternSyntaxException pse) { + // TODO this is not right (inconsistent). See also https://github.com/elastic/elasticsearch/issues/100038 + // this should generate a header warning and return null (as do the rest of this functionality in evaluators), + // but for the moment we let the exception through + throw pse; + } + return (drvCtx) -> new ReplaceConstantEvaluator(source(), strEval.get(drvCtx), regexPattern, newStrEval.get(drvCtx), drvCtx); + } + + var regexEval = toEvaluator.apply(regex); + return (drvCtx) -> new ReplaceEvaluator(source(), strEval.get(drvCtx), regexEval.get(drvCtx), newStrEval.get(drvCtx), drvCtx); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 91a3dd62232ec..55586232ebdab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -93,6 +93,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; import 
org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; @@ -352,6 +353,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, Right.class, PlanNamedTypes::writeRight, PlanNamedTypes::readRight), of(ScalarFunction.class, Split.class, PlanNamedTypes::writeSplit, PlanNamedTypes::readSplit), of(ScalarFunction.class, Tau.class, PlanNamedTypes::writeNoArgScalar, PlanNamedTypes::readNoArgScalar), + of(ScalarFunction.class, Replace.class, PlanNamedTypes::writeReplace, PlanNamedTypes::readReplace), // ArithmeticOperations of(ArithmeticOperation.class, Add.class, PlanNamedTypes::writeArithmeticOperation, PlanNamedTypes::readArithmeticOperation), of(ArithmeticOperation.class, Sub.class, PlanNamedTypes::writeArithmeticOperation, PlanNamedTypes::readArithmeticOperation), @@ -1308,6 +1310,18 @@ static void writeSubstring(PlanStreamOutput out, Substring substring) throws IOE out.writeOptionalWriteable(fields.size() == 3 ? 
o -> out.writeExpression(fields.get(2)) : null); } + static Replace readReplace(PlanStreamInput in) throws IOException { + return new Replace(Source.EMPTY, in.readExpression(), in.readExpression(), in.readExpression()); + } + + static void writeReplace(PlanStreamOutput out, Replace replace) throws IOException { + List fields = replace.children(); + assert fields.size() == 3; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + out.writeExpression(fields.get(2)); + } + static Left readLeft(PlanStreamInput in) throws IOException { return new Left(Source.EMPTY, in.readExpression(), in.readExpression()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index 0f4d194d8016c..eb4b11f5e2e34 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -7,15 +7,19 @@ package org.elasticsearch.xpack.esql.optimizer; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerRules.OptimizerRule; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; +import 
org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec; +import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.FilterExec; @@ -23,15 +27,19 @@ import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; +import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.PhysicalVerificationException; import org.elasticsearch.xpack.esql.planner.PhysicalVerifier; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.ql.common.Failure; +import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.MetadataAttribute; +import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; import org.elasticsearch.xpack.ql.expression.TypedAttribute; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -48,6 +56,7 @@ import org.elasticsearch.xpack.ql.rule.Rule; import org.elasticsearch.xpack.ql.util.Queries; import org.elasticsearch.xpack.ql.util.Queries.Clause; +import org.elasticsearch.xpack.ql.util.StringUtils; import java.util.ArrayList; import java.util.Collection; @@ -58,6 +67,9 @@ import java.util.function.Supplier; import static java.util.Arrays.asList; +import static java.util.Collections.emptyList; +import static 
java.util.Collections.singletonList; +import static org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType.COUNT; import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.splitAnd; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.UP; @@ -90,6 +102,7 @@ protected List> rules(boolean optimizeForEsSource) { esSourceRules.add(new PushTopNToSource()); esSourceRules.add(new PushLimitToSource()); esSourceRules.add(new PushFiltersToSource()); + esSourceRules.add(new PushStatsToSource()); } // execute the rules multiple times to improve the chances of things being pushed down @@ -304,6 +317,68 @@ private List buildFieldSorts(List orders) { } } + /** + * Looks for the case where certain stats exist right before the query and thus can be pushed down. + */ + private static class PushStatsToSource extends OptimizerRule { + + @Override + protected PhysicalPlan rule(AggregateExec aggregateExec) { + PhysicalPlan plan = aggregateExec; + if (aggregateExec.child() instanceof EsQueryExec queryExec) { + var tuple = pushableStats(aggregateExec); + + // TODO: handle case where some aggs cannot be pushed down by breaking the aggs into two sources (regular + stats) + union + // use the stats since the attributes are larger in size (due to seen) + if (tuple.v2().size() == aggregateExec.aggregates().size()) { + plan = new EsStatsQueryExec( + aggregateExec.source(), + queryExec.index(), + queryExec.query(), + queryExec.limit(), + tuple.v1(), + tuple.v2() + ); + } + } + return plan; + } + + private Tuple, List> pushableStats(AggregateExec aggregate) { + AttributeMap stats = new AttributeMap<>(); + Tuple, List> tuple = new Tuple<>(new ArrayList(), new ArrayList()); + + if (aggregate.groupings().isEmpty()) { + for (NamedExpression agg : aggregate.aggregates()) { + var attribute = agg.toAttribute(); + Stat stat = stats.computeIfAbsent(attribute, a -> { + if (agg instanceof Alias as) { + Expression child = as.child(); + if (child 
instanceof Count count) { + var target = count.field(); + // TODO: add count over field (has to be field attribute) + if (target.foldable()) { + return new Stat(StringUtils.WILDCARD, COUNT); + } + } + } + return null; + }); + if (stat != null) { + List intermediateAttributes = AbstractPhysicalOperationProviders.intermediateAttributes( + singletonList(agg), + emptyList() + ); + tuple.v1().addAll(intermediateAttributes); + tuple.v2().add(stat); + } + } + } + + return tuple; + } + } + private static final class EsqlTranslatorHandler extends QlTranslatorHandler { @Override public Query wrapFunctionQuery(ScalarFunction sf, Expression field, Supplier querySupplier) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index d7b31cd220760..be46b6c6e1797 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -127,7 +127,7 @@ public Vocabulary getVocabulary() { } - @SuppressWarnings("this-escape") public EsqlBaseLexer(CharStream input) { + public EsqlBaseLexer(CharStream input) { super(input); _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 349f31f7c476d..658e09ca4b190 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -177,6 +177,7 @@ regexBooleanExpression valueExpression operatorExpression primaryExpression +functionExpression rowCommand fields field @@ -216,4 +217,4 @@ enrichWithClause atn: -[4, 1, 81, 501, 2, 
0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 102, 8, 1, 10, 1, 12, 1, 105, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 111, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 126, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 138, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 145, 8, 5, 10, 5, 12, 5, 148, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 3, 5, 159, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 167, 8, 5, 10, 5, 12, 5, 170, 9, 5, 1, 6, 1, 6, 3, 6, 174, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 181, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 186, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 193, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 199, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 207, 8, 8, 10, 8, 12, 8, 210, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 223, 8, 9, 10, 9, 12, 9, 226, 9, 9, 3, 9, 228, 8, 9, 1, 9, 1, 9, 3, 9, 232, 8, 9, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 5, 11, 240, 8, 11, 10, 11, 12, 11, 243, 9, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 250, 8, 12, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 256, 8, 13, 10, 13, 12, 13, 259, 9, 13, 1, 13, 3, 13, 262, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 269, 8, 14, 10, 14, 12, 14, 272, 9, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 3, 16, 281, 8, 16, 1, 16, 1, 16, 3, 
16, 285, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 291, 8, 17, 1, 18, 1, 18, 1, 18, 5, 18, 296, 8, 18, 10, 18, 12, 18, 299, 9, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 5, 20, 306, 8, 20, 10, 20, 12, 20, 309, 9, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 326, 8, 22, 10, 22, 12, 22, 329, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 337, 8, 22, 10, 22, 12, 22, 340, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 348, 8, 22, 10, 22, 12, 22, 351, 9, 22, 1, 22, 1, 22, 3, 22, 355, 8, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 364, 8, 24, 10, 24, 12, 24, 367, 9, 24, 1, 25, 1, 25, 3, 25, 371, 8, 25, 1, 25, 1, 25, 3, 25, 375, 8, 25, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 381, 8, 26, 10, 26, 12, 26, 384, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 390, 8, 26, 10, 26, 12, 26, 393, 9, 26, 3, 26, 395, 8, 26, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 401, 8, 27, 10, 27, 12, 27, 404, 9, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 410, 8, 28, 10, 28, 12, 28, 413, 9, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 3, 30, 423, 8, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 5, 33, 435, 8, 33, 10, 33, 12, 33, 438, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 3, 36, 448, 8, 36, 1, 37, 3, 37, 451, 8, 37, 1, 37, 1, 37, 1, 38, 3, 38, 456, 8, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 3, 43, 475, 8, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 481, 8, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 487, 8, 44, 10, 44, 12, 44, 490, 9, 44, 3, 44, 492, 8, 44, 1, 45, 1, 45, 1, 45, 3, 45, 497, 8, 45, 1, 45, 1, 45, 1, 45, 0, 3, 2, 10, 16, 46, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 1, 0, 76, 77, 1, 0, 67, 68, 2, 0, 
32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 54, 59, 531, 0, 92, 1, 0, 0, 0, 2, 95, 1, 0, 0, 0, 4, 110, 1, 0, 0, 0, 6, 125, 1, 0, 0, 0, 8, 127, 1, 0, 0, 0, 10, 158, 1, 0, 0, 0, 12, 185, 1, 0, 0, 0, 14, 192, 1, 0, 0, 0, 16, 198, 1, 0, 0, 0, 18, 231, 1, 0, 0, 0, 20, 233, 1, 0, 0, 0, 22, 236, 1, 0, 0, 0, 24, 249, 1, 0, 0, 0, 26, 251, 1, 0, 0, 0, 28, 263, 1, 0, 0, 0, 30, 275, 1, 0, 0, 0, 32, 278, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 292, 1, 0, 0, 0, 38, 300, 1, 0, 0, 0, 40, 302, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 354, 1, 0, 0, 0, 46, 356, 1, 0, 0, 0, 48, 359, 1, 0, 0, 0, 50, 368, 1, 0, 0, 0, 52, 394, 1, 0, 0, 0, 54, 396, 1, 0, 0, 0, 56, 405, 1, 0, 0, 0, 58, 414, 1, 0, 0, 0, 60, 418, 1, 0, 0, 0, 62, 424, 1, 0, 0, 0, 64, 428, 1, 0, 0, 0, 66, 431, 1, 0, 0, 0, 68, 439, 1, 0, 0, 0, 70, 443, 1, 0, 0, 0, 72, 447, 1, 0, 0, 0, 74, 450, 1, 0, 0, 0, 76, 455, 1, 0, 0, 0, 78, 459, 1, 0, 0, 0, 80, 461, 1, 0, 0, 0, 82, 463, 1, 0, 0, 0, 84, 466, 1, 0, 0, 0, 86, 474, 1, 0, 0, 0, 88, 476, 1, 0, 0, 0, 90, 496, 1, 0, 0, 0, 92, 93, 3, 2, 1, 0, 93, 94, 5, 0, 0, 1, 94, 1, 1, 0, 0, 0, 95, 96, 6, 1, -1, 0, 96, 97, 3, 4, 2, 0, 97, 103, 1, 0, 0, 0, 98, 99, 10, 1, 0, 0, 99, 100, 5, 26, 0, 0, 100, 102, 3, 6, 3, 0, 101, 98, 1, 0, 0, 0, 102, 105, 1, 0, 0, 0, 103, 101, 1, 0, 0, 0, 103, 104, 1, 0, 0, 0, 104, 3, 1, 0, 0, 0, 105, 103, 1, 0, 0, 0, 106, 111, 3, 82, 41, 0, 107, 111, 3, 26, 13, 0, 108, 111, 3, 20, 10, 0, 109, 111, 3, 86, 43, 0, 110, 106, 1, 0, 0, 0, 110, 107, 1, 0, 0, 0, 110, 108, 1, 0, 0, 0, 110, 109, 1, 0, 0, 0, 111, 5, 1, 0, 0, 0, 112, 126, 3, 30, 15, 0, 113, 126, 3, 34, 17, 0, 114, 126, 3, 46, 23, 0, 115, 126, 3, 52, 26, 0, 116, 126, 3, 48, 24, 0, 117, 126, 3, 32, 16, 0, 118, 126, 3, 8, 4, 0, 119, 126, 3, 54, 27, 0, 120, 126, 3, 56, 28, 0, 121, 126, 3, 60, 30, 0, 122, 126, 3, 62, 31, 0, 123, 126, 3, 88, 44, 0, 124, 126, 3, 64, 32, 0, 125, 112, 1, 0, 0, 0, 125, 113, 1, 0, 0, 0, 125, 114, 1, 0, 0, 0, 125, 115, 1, 0, 0, 0, 125, 116, 1, 0, 0, 0, 125, 117, 1, 0, 0, 0, 125, 
118, 1, 0, 0, 0, 125, 119, 1, 0, 0, 0, 125, 120, 1, 0, 0, 0, 125, 121, 1, 0, 0, 0, 125, 122, 1, 0, 0, 0, 125, 123, 1, 0, 0, 0, 125, 124, 1, 0, 0, 0, 126, 7, 1, 0, 0, 0, 127, 128, 5, 18, 0, 0, 128, 129, 3, 10, 5, 0, 129, 9, 1, 0, 0, 0, 130, 131, 6, 5, -1, 0, 131, 132, 5, 44, 0, 0, 132, 159, 3, 10, 5, 7, 133, 159, 3, 14, 7, 0, 134, 159, 3, 12, 6, 0, 135, 137, 3, 14, 7, 0, 136, 138, 5, 44, 0, 0, 137, 136, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 140, 5, 41, 0, 0, 140, 141, 5, 40, 0, 0, 141, 146, 3, 14, 7, 0, 142, 143, 5, 34, 0, 0, 143, 145, 3, 14, 7, 0, 144, 142, 1, 0, 0, 0, 145, 148, 1, 0, 0, 0, 146, 144, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 149, 1, 0, 0, 0, 148, 146, 1, 0, 0, 0, 149, 150, 5, 50, 0, 0, 150, 159, 1, 0, 0, 0, 151, 152, 3, 14, 7, 0, 152, 154, 5, 42, 0, 0, 153, 155, 5, 44, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 45, 0, 0, 157, 159, 1, 0, 0, 0, 158, 130, 1, 0, 0, 0, 158, 133, 1, 0, 0, 0, 158, 134, 1, 0, 0, 0, 158, 135, 1, 0, 0, 0, 158, 151, 1, 0, 0, 0, 159, 168, 1, 0, 0, 0, 160, 161, 10, 4, 0, 0, 161, 162, 5, 31, 0, 0, 162, 167, 3, 10, 5, 5, 163, 164, 10, 3, 0, 0, 164, 165, 5, 47, 0, 0, 165, 167, 3, 10, 5, 4, 166, 160, 1, 0, 0, 0, 166, 163, 1, 0, 0, 0, 167, 170, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 168, 169, 1, 0, 0, 0, 169, 11, 1, 0, 0, 0, 170, 168, 1, 0, 0, 0, 171, 173, 3, 14, 7, 0, 172, 174, 5, 44, 0, 0, 173, 172, 1, 0, 0, 0, 173, 174, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 5, 43, 0, 0, 176, 177, 3, 78, 39, 0, 177, 186, 1, 0, 0, 0, 178, 180, 3, 14, 7, 0, 179, 181, 5, 44, 0, 0, 180, 179, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 5, 49, 0, 0, 183, 184, 3, 78, 39, 0, 184, 186, 1, 0, 0, 0, 185, 171, 1, 0, 0, 0, 185, 178, 1, 0, 0, 0, 186, 13, 1, 0, 0, 0, 187, 193, 3, 16, 8, 0, 188, 189, 3, 16, 8, 0, 189, 190, 3, 80, 40, 0, 190, 191, 3, 16, 8, 0, 191, 193, 1, 0, 0, 0, 192, 187, 1, 0, 0, 0, 192, 188, 1, 0, 0, 0, 193, 15, 1, 0, 0, 0, 194, 195, 6, 8, -1, 0, 195, 199, 3, 
18, 9, 0, 196, 197, 7, 0, 0, 0, 197, 199, 3, 16, 8, 3, 198, 194, 1, 0, 0, 0, 198, 196, 1, 0, 0, 0, 199, 208, 1, 0, 0, 0, 200, 201, 10, 2, 0, 0, 201, 202, 7, 1, 0, 0, 202, 207, 3, 16, 8, 3, 203, 204, 10, 1, 0, 0, 204, 205, 7, 0, 0, 0, 205, 207, 3, 16, 8, 2, 206, 200, 1, 0, 0, 0, 206, 203, 1, 0, 0, 0, 207, 210, 1, 0, 0, 0, 208, 206, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 17, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 211, 232, 3, 44, 22, 0, 212, 232, 3, 40, 20, 0, 213, 214, 5, 40, 0, 0, 214, 215, 3, 10, 5, 0, 215, 216, 5, 50, 0, 0, 216, 232, 1, 0, 0, 0, 217, 218, 3, 42, 21, 0, 218, 227, 5, 40, 0, 0, 219, 224, 3, 10, 5, 0, 220, 221, 5, 34, 0, 0, 221, 223, 3, 10, 5, 0, 222, 220, 1, 0, 0, 0, 223, 226, 1, 0, 0, 0, 224, 222, 1, 0, 0, 0, 224, 225, 1, 0, 0, 0, 225, 228, 1, 0, 0, 0, 226, 224, 1, 0, 0, 0, 227, 219, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 5, 50, 0, 0, 230, 232, 1, 0, 0, 0, 231, 211, 1, 0, 0, 0, 231, 212, 1, 0, 0, 0, 231, 213, 1, 0, 0, 0, 231, 217, 1, 0, 0, 0, 232, 19, 1, 0, 0, 0, 233, 234, 5, 14, 0, 0, 234, 235, 3, 22, 11, 0, 235, 21, 1, 0, 0, 0, 236, 241, 3, 24, 12, 0, 237, 238, 5, 34, 0, 0, 238, 240, 3, 24, 12, 0, 239, 237, 1, 0, 0, 0, 240, 243, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 23, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 244, 250, 3, 10, 5, 0, 245, 246, 3, 40, 20, 0, 246, 247, 5, 33, 0, 0, 247, 248, 3, 10, 5, 0, 248, 250, 1, 0, 0, 0, 249, 244, 1, 0, 0, 0, 249, 245, 1, 0, 0, 0, 250, 25, 1, 0, 0, 0, 251, 252, 5, 6, 0, 0, 252, 257, 3, 38, 19, 0, 253, 254, 5, 34, 0, 0, 254, 256, 3, 38, 19, 0, 255, 253, 1, 0, 0, 0, 256, 259, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 261, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 260, 262, 3, 28, 14, 0, 261, 260, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 27, 1, 0, 0, 0, 263, 264, 5, 65, 0, 0, 264, 265, 5, 73, 0, 0, 265, 270, 3, 38, 19, 0, 266, 267, 5, 34, 0, 0, 267, 269, 3, 38, 19, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 
0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 274, 5, 66, 0, 0, 274, 29, 1, 0, 0, 0, 275, 276, 5, 4, 0, 0, 276, 277, 3, 22, 11, 0, 277, 31, 1, 0, 0, 0, 278, 280, 5, 17, 0, 0, 279, 281, 3, 22, 11, 0, 280, 279, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 283, 5, 30, 0, 0, 283, 285, 3, 36, 18, 0, 284, 282, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 33, 1, 0, 0, 0, 286, 287, 5, 8, 0, 0, 287, 290, 3, 22, 11, 0, 288, 289, 5, 30, 0, 0, 289, 291, 3, 36, 18, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 35, 1, 0, 0, 0, 292, 297, 3, 40, 20, 0, 293, 294, 5, 34, 0, 0, 294, 296, 3, 40, 20, 0, 295, 293, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 37, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 301, 7, 2, 0, 0, 301, 39, 1, 0, 0, 0, 302, 307, 3, 42, 21, 0, 303, 304, 5, 36, 0, 0, 304, 306, 3, 42, 21, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 41, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 311, 7, 3, 0, 0, 311, 43, 1, 0, 0, 0, 312, 355, 5, 45, 0, 0, 313, 314, 3, 76, 38, 0, 314, 315, 5, 67, 0, 0, 315, 355, 1, 0, 0, 0, 316, 355, 3, 74, 37, 0, 317, 355, 3, 76, 38, 0, 318, 355, 3, 70, 35, 0, 319, 355, 5, 48, 0, 0, 320, 355, 3, 78, 39, 0, 321, 322, 5, 65, 0, 0, 322, 327, 3, 72, 36, 0, 323, 324, 5, 34, 0, 0, 324, 326, 3, 72, 36, 0, 325, 323, 1, 0, 0, 0, 326, 329, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 330, 1, 0, 0, 0, 329, 327, 1, 0, 0, 0, 330, 331, 5, 66, 0, 0, 331, 355, 1, 0, 0, 0, 332, 333, 5, 65, 0, 0, 333, 338, 3, 70, 35, 0, 334, 335, 5, 34, 0, 0, 335, 337, 3, 70, 35, 0, 336, 334, 1, 0, 0, 0, 337, 340, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 341, 1, 0, 0, 0, 340, 338, 1, 0, 0, 0, 341, 342, 5, 66, 0, 0, 342, 355, 1, 0, 0, 0, 343, 344, 5, 65, 0, 0, 344, 349, 3, 78, 39, 0, 345, 346, 5, 34, 0, 0, 346, 348, 3, 78, 39, 0, 347, 345, 1, 0, 0, 0, 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 349, 350, 1, 0, 0, 0, 350, 352, 1, 0, 0, 0, 351, 349, 1, 0, 0, 0, 352, 
353, 5, 66, 0, 0, 353, 355, 1, 0, 0, 0, 354, 312, 1, 0, 0, 0, 354, 313, 1, 0, 0, 0, 354, 316, 1, 0, 0, 0, 354, 317, 1, 0, 0, 0, 354, 318, 1, 0, 0, 0, 354, 319, 1, 0, 0, 0, 354, 320, 1, 0, 0, 0, 354, 321, 1, 0, 0, 0, 354, 332, 1, 0, 0, 0, 354, 343, 1, 0, 0, 0, 355, 45, 1, 0, 0, 0, 356, 357, 5, 10, 0, 0, 357, 358, 5, 28, 0, 0, 358, 47, 1, 0, 0, 0, 359, 360, 5, 16, 0, 0, 360, 365, 3, 50, 25, 0, 361, 362, 5, 34, 0, 0, 362, 364, 3, 50, 25, 0, 363, 361, 1, 0, 0, 0, 364, 367, 1, 0, 0, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 49, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 368, 370, 3, 10, 5, 0, 369, 371, 7, 4, 0, 0, 370, 369, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 374, 1, 0, 0, 0, 372, 373, 5, 46, 0, 0, 373, 375, 7, 5, 0, 0, 374, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 51, 1, 0, 0, 0, 376, 377, 5, 9, 0, 0, 377, 382, 3, 38, 19, 0, 378, 379, 5, 34, 0, 0, 379, 381, 3, 38, 19, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 395, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 386, 5, 12, 0, 0, 386, 391, 3, 38, 19, 0, 387, 388, 5, 34, 0, 0, 388, 390, 3, 38, 19, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 395, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 376, 1, 0, 0, 0, 394, 385, 1, 0, 0, 0, 395, 53, 1, 0, 0, 0, 396, 397, 5, 2, 0, 0, 397, 402, 3, 38, 19, 0, 398, 399, 5, 34, 0, 0, 399, 401, 3, 38, 19, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 55, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 13, 0, 0, 406, 411, 3, 58, 29, 0, 407, 408, 5, 34, 0, 0, 408, 410, 3, 58, 29, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 57, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 415, 3, 38, 19, 0, 415, 416, 5, 72, 0, 0, 416, 417, 3, 38, 19, 0, 417, 59, 1, 0, 0, 0, 418, 419, 5, 1, 0, 0, 419, 420, 3, 18, 9, 0, 420, 422, 3, 78, 39, 0, 421, 423, 3, 66, 33, 0, 422, 421, 1, 0, 0, 0, 422, 423, 1, 0, 0, 0, 423, 61, 1, 
0, 0, 0, 424, 425, 5, 7, 0, 0, 425, 426, 3, 18, 9, 0, 426, 427, 3, 78, 39, 0, 427, 63, 1, 0, 0, 0, 428, 429, 5, 11, 0, 0, 429, 430, 3, 38, 19, 0, 430, 65, 1, 0, 0, 0, 431, 436, 3, 68, 34, 0, 432, 433, 5, 34, 0, 0, 433, 435, 3, 68, 34, 0, 434, 432, 1, 0, 0, 0, 435, 438, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 67, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 439, 440, 3, 42, 21, 0, 440, 441, 5, 33, 0, 0, 441, 442, 3, 44, 22, 0, 442, 69, 1, 0, 0, 0, 443, 444, 7, 6, 0, 0, 444, 71, 1, 0, 0, 0, 445, 448, 3, 74, 37, 0, 446, 448, 3, 76, 38, 0, 447, 445, 1, 0, 0, 0, 447, 446, 1, 0, 0, 0, 448, 73, 1, 0, 0, 0, 449, 451, 7, 0, 0, 0, 450, 449, 1, 0, 0, 0, 450, 451, 1, 0, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 5, 29, 0, 0, 453, 75, 1, 0, 0, 0, 454, 456, 7, 0, 0, 0, 455, 454, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 5, 28, 0, 0, 458, 77, 1, 0, 0, 0, 459, 460, 5, 27, 0, 0, 460, 79, 1, 0, 0, 0, 461, 462, 7, 7, 0, 0, 462, 81, 1, 0, 0, 0, 463, 464, 5, 5, 0, 0, 464, 465, 3, 84, 42, 0, 465, 83, 1, 0, 0, 0, 466, 467, 5, 65, 0, 0, 467, 468, 3, 2, 1, 0, 468, 469, 5, 66, 0, 0, 469, 85, 1, 0, 0, 0, 470, 471, 5, 15, 0, 0, 471, 475, 5, 52, 0, 0, 472, 473, 5, 15, 0, 0, 473, 475, 5, 53, 0, 0, 474, 470, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 475, 87, 1, 0, 0, 0, 476, 477, 5, 3, 0, 0, 477, 480, 3, 38, 19, 0, 478, 479, 5, 74, 0, 0, 479, 481, 3, 38, 19, 0, 480, 478, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 491, 1, 0, 0, 0, 482, 483, 5, 75, 0, 0, 483, 488, 3, 90, 45, 0, 484, 485, 5, 34, 0, 0, 485, 487, 3, 90, 45, 0, 486, 484, 1, 0, 0, 0, 487, 490, 1, 0, 0, 0, 488, 486, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 492, 1, 0, 0, 0, 490, 488, 1, 0, 0, 0, 491, 482, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 89, 1, 0, 0, 0, 493, 494, 3, 38, 19, 0, 494, 495, 5, 33, 0, 0, 495, 497, 1, 0, 0, 0, 496, 493, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 3, 38, 19, 0, 499, 91, 1, 0, 0, 0, 51, 103, 110, 125, 137, 146, 154, 158, 166, 168, 173, 180, 185, 192, 198, 206, 208, 224, 
227, 231, 241, 249, 257, 261, 270, 280, 284, 290, 297, 307, 327, 338, 349, 354, 365, 370, 374, 382, 391, 394, 402, 411, 422, 436, 447, 450, 455, 474, 480, 488, 491, 496] \ No newline at end of file +[4, 1, 81, 505, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 104, 8, 1, 10, 1, 12, 1, 107, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 113, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 128, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 140, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 147, 8, 5, 10, 5, 12, 5, 150, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 157, 8, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 169, 8, 5, 10, 5, 12, 5, 172, 9, 5, 1, 6, 1, 6, 3, 6, 176, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 183, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 188, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 195, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 201, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 209, 8, 8, 10, 8, 12, 8, 212, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 221, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 229, 8, 10, 10, 10, 12, 10, 232, 9, 10, 3, 10, 234, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 244, 8, 12, 10, 12, 12, 12, 247, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 254, 8, 13, 1, 14, 1, 
14, 1, 14, 1, 14, 5, 14, 260, 8, 14, 10, 14, 12, 14, 263, 9, 14, 1, 14, 3, 14, 266, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 273, 8, 15, 10, 15, 12, 15, 276, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 285, 8, 17, 1, 17, 1, 17, 3, 17, 289, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 295, 8, 18, 1, 19, 1, 19, 1, 19, 5, 19, 300, 8, 19, 10, 19, 12, 19, 303, 9, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 5, 21, 310, 8, 21, 10, 21, 12, 21, 313, 9, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 330, 8, 23, 10, 23, 12, 23, 333, 9, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 341, 8, 23, 10, 23, 12, 23, 344, 9, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 352, 8, 23, 10, 23, 12, 23, 355, 9, 23, 1, 23, 1, 23, 3, 23, 359, 8, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 368, 8, 25, 10, 25, 12, 25, 371, 9, 25, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 26, 1, 26, 3, 26, 379, 8, 26, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 385, 8, 27, 10, 27, 12, 27, 388, 9, 27, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 394, 8, 27, 10, 27, 12, 27, 397, 9, 27, 3, 27, 399, 8, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 405, 8, 28, 10, 28, 12, 28, 408, 9, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 414, 8, 29, 10, 29, 12, 29, 417, 9, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 3, 31, 427, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 5, 34, 439, 8, 34, 10, 34, 12, 34, 442, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 37, 1, 37, 3, 37, 452, 8, 37, 1, 38, 3, 38, 455, 8, 38, 1, 38, 1, 38, 1, 39, 3, 39, 460, 8, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 479, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 485, 8, 45, 1, 45, 1, 45, 1, 45, 1, 45, 5, 45, 491, 8, 45, 10, 45, 12, 45, 494, 9, 45, 3, 45, 496, 8, 45, 1, 46, 1, 46, 1, 46, 3, 46, 501, 8, 46, 1, 46, 1, 46, 1, 46, 0, 3, 2, 
10, 16, 47, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 1, 0, 76, 77, 1, 0, 67, 68, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 54, 59, 535, 0, 94, 1, 0, 0, 0, 2, 97, 1, 0, 0, 0, 4, 112, 1, 0, 0, 0, 6, 127, 1, 0, 0, 0, 8, 129, 1, 0, 0, 0, 10, 160, 1, 0, 0, 0, 12, 187, 1, 0, 0, 0, 14, 194, 1, 0, 0, 0, 16, 200, 1, 0, 0, 0, 18, 220, 1, 0, 0, 0, 20, 222, 1, 0, 0, 0, 22, 237, 1, 0, 0, 0, 24, 240, 1, 0, 0, 0, 26, 253, 1, 0, 0, 0, 28, 255, 1, 0, 0, 0, 30, 267, 1, 0, 0, 0, 32, 279, 1, 0, 0, 0, 34, 282, 1, 0, 0, 0, 36, 290, 1, 0, 0, 0, 38, 296, 1, 0, 0, 0, 40, 304, 1, 0, 0, 0, 42, 306, 1, 0, 0, 0, 44, 314, 1, 0, 0, 0, 46, 358, 1, 0, 0, 0, 48, 360, 1, 0, 0, 0, 50, 363, 1, 0, 0, 0, 52, 372, 1, 0, 0, 0, 54, 398, 1, 0, 0, 0, 56, 400, 1, 0, 0, 0, 58, 409, 1, 0, 0, 0, 60, 418, 1, 0, 0, 0, 62, 422, 1, 0, 0, 0, 64, 428, 1, 0, 0, 0, 66, 432, 1, 0, 0, 0, 68, 435, 1, 0, 0, 0, 70, 443, 1, 0, 0, 0, 72, 447, 1, 0, 0, 0, 74, 451, 1, 0, 0, 0, 76, 454, 1, 0, 0, 0, 78, 459, 1, 0, 0, 0, 80, 463, 1, 0, 0, 0, 82, 465, 1, 0, 0, 0, 84, 467, 1, 0, 0, 0, 86, 470, 1, 0, 0, 0, 88, 478, 1, 0, 0, 0, 90, 480, 1, 0, 0, 0, 92, 500, 1, 0, 0, 0, 94, 95, 3, 2, 1, 0, 95, 96, 5, 0, 0, 1, 96, 1, 1, 0, 0, 0, 97, 98, 6, 1, -1, 0, 98, 99, 3, 4, 2, 0, 99, 105, 1, 0, 0, 0, 100, 101, 10, 1, 0, 0, 101, 102, 5, 26, 0, 0, 102, 104, 3, 6, 3, 0, 103, 100, 1, 0, 0, 0, 104, 107, 1, 0, 0, 0, 105, 103, 1, 0, 0, 0, 105, 106, 1, 0, 0, 0, 106, 3, 1, 0, 0, 0, 107, 105, 1, 0, 0, 0, 108, 113, 3, 84, 42, 0, 109, 113, 3, 28, 14, 0, 110, 113, 3, 22, 11, 0, 111, 113, 3, 88, 44, 0, 112, 108, 1, 0, 0, 0, 112, 109, 1, 0, 0, 0, 112, 110, 1, 0, 0, 0, 112, 111, 1, 0, 0, 0, 113, 5, 1, 0, 0, 0, 114, 128, 3, 32, 16, 0, 115, 128, 3, 36, 18, 0, 116, 128, 3, 48, 24, 0, 117, 128, 3, 54, 27, 0, 118, 128, 3, 50, 25, 0, 119, 128, 3, 34, 17, 0, 120, 128, 3, 8, 4, 
0, 121, 128, 3, 56, 28, 0, 122, 128, 3, 58, 29, 0, 123, 128, 3, 62, 31, 0, 124, 128, 3, 64, 32, 0, 125, 128, 3, 90, 45, 0, 126, 128, 3, 66, 33, 0, 127, 114, 1, 0, 0, 0, 127, 115, 1, 0, 0, 0, 127, 116, 1, 0, 0, 0, 127, 117, 1, 0, 0, 0, 127, 118, 1, 0, 0, 0, 127, 119, 1, 0, 0, 0, 127, 120, 1, 0, 0, 0, 127, 121, 1, 0, 0, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 7, 1, 0, 0, 0, 129, 130, 5, 18, 0, 0, 130, 131, 3, 10, 5, 0, 131, 9, 1, 0, 0, 0, 132, 133, 6, 5, -1, 0, 133, 134, 5, 44, 0, 0, 134, 161, 3, 10, 5, 7, 135, 161, 3, 14, 7, 0, 136, 161, 3, 12, 6, 0, 137, 139, 3, 14, 7, 0, 138, 140, 5, 44, 0, 0, 139, 138, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 1, 0, 0, 0, 141, 142, 5, 41, 0, 0, 142, 143, 5, 40, 0, 0, 143, 148, 3, 14, 7, 0, 144, 145, 5, 34, 0, 0, 145, 147, 3, 14, 7, 0, 146, 144, 1, 0, 0, 0, 147, 150, 1, 0, 0, 0, 148, 146, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 151, 1, 0, 0, 0, 150, 148, 1, 0, 0, 0, 151, 152, 5, 50, 0, 0, 152, 161, 1, 0, 0, 0, 153, 154, 3, 14, 7, 0, 154, 156, 5, 42, 0, 0, 155, 157, 5, 44, 0, 0, 156, 155, 1, 0, 0, 0, 156, 157, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 159, 5, 45, 0, 0, 159, 161, 1, 0, 0, 0, 160, 132, 1, 0, 0, 0, 160, 135, 1, 0, 0, 0, 160, 136, 1, 0, 0, 0, 160, 137, 1, 0, 0, 0, 160, 153, 1, 0, 0, 0, 161, 170, 1, 0, 0, 0, 162, 163, 10, 4, 0, 0, 163, 164, 5, 31, 0, 0, 164, 169, 3, 10, 5, 5, 165, 166, 10, 3, 0, 0, 166, 167, 5, 47, 0, 0, 167, 169, 3, 10, 5, 4, 168, 162, 1, 0, 0, 0, 168, 165, 1, 0, 0, 0, 169, 172, 1, 0, 0, 0, 170, 168, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 11, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 175, 3, 14, 7, 0, 174, 176, 5, 44, 0, 0, 175, 174, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 178, 5, 43, 0, 0, 178, 179, 3, 80, 40, 0, 179, 188, 1, 0, 0, 0, 180, 182, 3, 14, 7, 0, 181, 183, 5, 44, 0, 0, 182, 181, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 185, 5, 49, 0, 0, 185, 186, 3, 80, 40, 0, 186, 188, 1, 0, 0, 
0, 187, 173, 1, 0, 0, 0, 187, 180, 1, 0, 0, 0, 188, 13, 1, 0, 0, 0, 189, 195, 3, 16, 8, 0, 190, 191, 3, 16, 8, 0, 191, 192, 3, 82, 41, 0, 192, 193, 3, 16, 8, 0, 193, 195, 1, 0, 0, 0, 194, 189, 1, 0, 0, 0, 194, 190, 1, 0, 0, 0, 195, 15, 1, 0, 0, 0, 196, 197, 6, 8, -1, 0, 197, 201, 3, 18, 9, 0, 198, 199, 7, 0, 0, 0, 199, 201, 3, 16, 8, 3, 200, 196, 1, 0, 0, 0, 200, 198, 1, 0, 0, 0, 201, 210, 1, 0, 0, 0, 202, 203, 10, 2, 0, 0, 203, 204, 7, 1, 0, 0, 204, 209, 3, 16, 8, 3, 205, 206, 10, 1, 0, 0, 206, 207, 7, 0, 0, 0, 207, 209, 3, 16, 8, 2, 208, 202, 1, 0, 0, 0, 208, 205, 1, 0, 0, 0, 209, 212, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, 17, 1, 0, 0, 0, 212, 210, 1, 0, 0, 0, 213, 221, 3, 46, 23, 0, 214, 221, 3, 42, 21, 0, 215, 221, 3, 20, 10, 0, 216, 217, 5, 40, 0, 0, 217, 218, 3, 10, 5, 0, 218, 219, 5, 50, 0, 0, 219, 221, 1, 0, 0, 0, 220, 213, 1, 0, 0, 0, 220, 214, 1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 220, 216, 1, 0, 0, 0, 221, 19, 1, 0, 0, 0, 222, 223, 3, 44, 22, 0, 223, 233, 5, 40, 0, 0, 224, 234, 5, 62, 0, 0, 225, 230, 3, 10, 5, 0, 226, 227, 5, 34, 0, 0, 227, 229, 3, 10, 5, 0, 228, 226, 1, 0, 0, 0, 229, 232, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 234, 1, 0, 0, 0, 232, 230, 1, 0, 0, 0, 233, 224, 1, 0, 0, 0, 233, 225, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 236, 5, 50, 0, 0, 236, 21, 1, 0, 0, 0, 237, 238, 5, 14, 0, 0, 238, 239, 3, 24, 12, 0, 239, 23, 1, 0, 0, 0, 240, 245, 3, 26, 13, 0, 241, 242, 5, 34, 0, 0, 242, 244, 3, 26, 13, 0, 243, 241, 1, 0, 0, 0, 244, 247, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 25, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 248, 254, 3, 10, 5, 0, 249, 250, 3, 42, 21, 0, 250, 251, 5, 33, 0, 0, 251, 252, 3, 10, 5, 0, 252, 254, 1, 0, 0, 0, 253, 248, 1, 0, 0, 0, 253, 249, 1, 0, 0, 0, 254, 27, 1, 0, 0, 0, 255, 256, 5, 6, 0, 0, 256, 261, 3, 40, 20, 0, 257, 258, 5, 34, 0, 0, 258, 260, 3, 40, 20, 0, 259, 257, 1, 0, 0, 0, 260, 263, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 
262, 265, 1, 0, 0, 0, 263, 261, 1, 0, 0, 0, 264, 266, 3, 30, 15, 0, 265, 264, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 29, 1, 0, 0, 0, 267, 268, 5, 65, 0, 0, 268, 269, 5, 73, 0, 0, 269, 274, 3, 40, 20, 0, 270, 271, 5, 34, 0, 0, 271, 273, 3, 40, 20, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 277, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 278, 5, 66, 0, 0, 278, 31, 1, 0, 0, 0, 279, 280, 5, 4, 0, 0, 280, 281, 3, 24, 12, 0, 281, 33, 1, 0, 0, 0, 282, 284, 5, 17, 0, 0, 283, 285, 3, 24, 12, 0, 284, 283, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, 287, 5, 30, 0, 0, 287, 289, 3, 38, 19, 0, 288, 286, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 35, 1, 0, 0, 0, 290, 291, 5, 8, 0, 0, 291, 294, 3, 24, 12, 0, 292, 293, 5, 30, 0, 0, 293, 295, 3, 38, 19, 0, 294, 292, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 37, 1, 0, 0, 0, 296, 301, 3, 42, 21, 0, 297, 298, 5, 34, 0, 0, 298, 300, 3, 42, 21, 0, 299, 297, 1, 0, 0, 0, 300, 303, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 39, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 304, 305, 7, 2, 0, 0, 305, 41, 1, 0, 0, 0, 306, 311, 3, 44, 22, 0, 307, 308, 5, 36, 0, 0, 308, 310, 3, 44, 22, 0, 309, 307, 1, 0, 0, 0, 310, 313, 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 43, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 314, 315, 7, 3, 0, 0, 315, 45, 1, 0, 0, 0, 316, 359, 5, 45, 0, 0, 317, 318, 3, 78, 39, 0, 318, 319, 5, 67, 0, 0, 319, 359, 1, 0, 0, 0, 320, 359, 3, 76, 38, 0, 321, 359, 3, 78, 39, 0, 322, 359, 3, 72, 36, 0, 323, 359, 5, 48, 0, 0, 324, 359, 3, 80, 40, 0, 325, 326, 5, 65, 0, 0, 326, 331, 3, 74, 37, 0, 327, 328, 5, 34, 0, 0, 328, 330, 3, 74, 37, 0, 329, 327, 1, 0, 0, 0, 330, 333, 1, 0, 0, 0, 331, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 334, 1, 0, 0, 0, 333, 331, 1, 0, 0, 0, 334, 335, 5, 66, 0, 0, 335, 359, 1, 0, 0, 0, 336, 337, 5, 65, 0, 0, 337, 342, 3, 72, 36, 0, 338, 339, 5, 34, 0, 0, 339, 341, 3, 72, 36, 0, 340, 338, 1, 0, 0, 0, 341, 344, 1, 0, 0, 0, 342, 340, 1, 
0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 345, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 345, 346, 5, 66, 0, 0, 346, 359, 1, 0, 0, 0, 347, 348, 5, 65, 0, 0, 348, 353, 3, 80, 40, 0, 349, 350, 5, 34, 0, 0, 350, 352, 3, 80, 40, 0, 351, 349, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 356, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 356, 357, 5, 66, 0, 0, 357, 359, 1, 0, 0, 0, 358, 316, 1, 0, 0, 0, 358, 317, 1, 0, 0, 0, 358, 320, 1, 0, 0, 0, 358, 321, 1, 0, 0, 0, 358, 322, 1, 0, 0, 0, 358, 323, 1, 0, 0, 0, 358, 324, 1, 0, 0, 0, 358, 325, 1, 0, 0, 0, 358, 336, 1, 0, 0, 0, 358, 347, 1, 0, 0, 0, 359, 47, 1, 0, 0, 0, 360, 361, 5, 10, 0, 0, 361, 362, 5, 28, 0, 0, 362, 49, 1, 0, 0, 0, 363, 364, 5, 16, 0, 0, 364, 369, 3, 52, 26, 0, 365, 366, 5, 34, 0, 0, 366, 368, 3, 52, 26, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 51, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 374, 3, 10, 5, 0, 373, 375, 7, 4, 0, 0, 374, 373, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 378, 1, 0, 0, 0, 376, 377, 5, 46, 0, 0, 377, 379, 7, 5, 0, 0, 378, 376, 1, 0, 0, 0, 378, 379, 1, 0, 0, 0, 379, 53, 1, 0, 0, 0, 380, 381, 5, 9, 0, 0, 381, 386, 3, 40, 20, 0, 382, 383, 5, 34, 0, 0, 383, 385, 3, 40, 20, 0, 384, 382, 1, 0, 0, 0, 385, 388, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 399, 1, 0, 0, 0, 388, 386, 1, 0, 0, 0, 389, 390, 5, 12, 0, 0, 390, 395, 3, 40, 20, 0, 391, 392, 5, 34, 0, 0, 392, 394, 3, 40, 20, 0, 393, 391, 1, 0, 0, 0, 394, 397, 1, 0, 0, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 399, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 398, 380, 1, 0, 0, 0, 398, 389, 1, 0, 0, 0, 399, 55, 1, 0, 0, 0, 400, 401, 5, 2, 0, 0, 401, 406, 3, 40, 20, 0, 402, 403, 5, 34, 0, 0, 403, 405, 3, 40, 20, 0, 404, 402, 1, 0, 0, 0, 405, 408, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 406, 407, 1, 0, 0, 0, 407, 57, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 409, 410, 5, 13, 0, 0, 410, 415, 3, 60, 30, 0, 411, 412, 5, 34, 0, 0, 412, 414, 3, 60, 30, 0, 413, 411, 1, 0, 0, 
0, 414, 417, 1, 0, 0, 0, 415, 413, 1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 59, 1, 0, 0, 0, 417, 415, 1, 0, 0, 0, 418, 419, 3, 40, 20, 0, 419, 420, 5, 72, 0, 0, 420, 421, 3, 40, 20, 0, 421, 61, 1, 0, 0, 0, 422, 423, 5, 1, 0, 0, 423, 424, 3, 18, 9, 0, 424, 426, 3, 80, 40, 0, 425, 427, 3, 68, 34, 0, 426, 425, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 63, 1, 0, 0, 0, 428, 429, 5, 7, 0, 0, 429, 430, 3, 18, 9, 0, 430, 431, 3, 80, 40, 0, 431, 65, 1, 0, 0, 0, 432, 433, 5, 11, 0, 0, 433, 434, 3, 40, 20, 0, 434, 67, 1, 0, 0, 0, 435, 440, 3, 70, 35, 0, 436, 437, 5, 34, 0, 0, 437, 439, 3, 70, 35, 0, 438, 436, 1, 0, 0, 0, 439, 442, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 69, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 444, 3, 44, 22, 0, 444, 445, 5, 33, 0, 0, 445, 446, 3, 46, 23, 0, 446, 71, 1, 0, 0, 0, 447, 448, 7, 6, 0, 0, 448, 73, 1, 0, 0, 0, 449, 452, 3, 76, 38, 0, 450, 452, 3, 78, 39, 0, 451, 449, 1, 0, 0, 0, 451, 450, 1, 0, 0, 0, 452, 75, 1, 0, 0, 0, 453, 455, 7, 0, 0, 0, 454, 453, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 457, 5, 29, 0, 0, 457, 77, 1, 0, 0, 0, 458, 460, 7, 0, 0, 0, 459, 458, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 5, 28, 0, 0, 462, 79, 1, 0, 0, 0, 463, 464, 5, 27, 0, 0, 464, 81, 1, 0, 0, 0, 465, 466, 7, 7, 0, 0, 466, 83, 1, 0, 0, 0, 467, 468, 5, 5, 0, 0, 468, 469, 3, 86, 43, 0, 469, 85, 1, 0, 0, 0, 470, 471, 5, 65, 0, 0, 471, 472, 3, 2, 1, 0, 472, 473, 5, 66, 0, 0, 473, 87, 1, 0, 0, 0, 474, 475, 5, 15, 0, 0, 475, 479, 5, 52, 0, 0, 476, 477, 5, 15, 0, 0, 477, 479, 5, 53, 0, 0, 478, 474, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 479, 89, 1, 0, 0, 0, 480, 481, 5, 3, 0, 0, 481, 484, 3, 40, 20, 0, 482, 483, 5, 74, 0, 0, 483, 485, 3, 40, 20, 0, 484, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 495, 1, 0, 0, 0, 486, 487, 5, 75, 0, 0, 487, 492, 3, 92, 46, 0, 488, 489, 5, 34, 0, 0, 489, 491, 3, 92, 46, 0, 490, 488, 1, 0, 0, 0, 491, 494, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 496, 1, 0, 0, 0, 
494, 492, 1, 0, 0, 0, 495, 486, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 91, 1, 0, 0, 0, 497, 498, 3, 40, 20, 0, 498, 499, 5, 33, 0, 0, 499, 501, 1, 0, 0, 0, 500, 497, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 3, 40, 20, 0, 503, 93, 1, 0, 0, 0, 51, 105, 112, 127, 139, 148, 156, 160, 168, 170, 175, 182, 187, 194, 200, 208, 210, 220, 230, 233, 245, 253, 261, 265, 274, 284, 288, 294, 301, 311, 331, 342, 353, 358, 369, 374, 378, 386, 395, 398, 406, 415, 426, 440, 451, 454, 459, 478, 484, 492, 495, 500] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 79cca599aabac..49d9abcc087c7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -34,28 +34,29 @@ public class EsqlBaseParser extends Parser { RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9, - RULE_rowCommand = 10, RULE_fields = 11, RULE_field = 12, RULE_fromCommand = 13, - RULE_metadata = 14, RULE_evalCommand = 15, RULE_statsCommand = 16, RULE_inlinestatsCommand = 17, - RULE_grouping = 18, RULE_sourceIdentifier = 19, RULE_qualifiedName = 20, - RULE_identifier = 21, RULE_constant = 22, RULE_limitCommand = 23, RULE_sortCommand = 24, - RULE_orderExpression = 25, RULE_keepCommand = 26, RULE_dropCommand = 27, - RULE_renameCommand = 28, RULE_renameClause = 29, RULE_dissectCommand = 30, - RULE_grokCommand = 31, RULE_mvExpandCommand = 32, RULE_commandOptions = 33, - RULE_commandOption = 34, RULE_booleanValue = 35, RULE_numericValue = 36, - RULE_decimalValue = 37, 
RULE_integerValue = 38, RULE_string = 39, RULE_comparisonOperator = 40, - RULE_explainCommand = 41, RULE_subqueryExpression = 42, RULE_showCommand = 43, - RULE_enrichCommand = 44, RULE_enrichWithClause = 45; + RULE_functionExpression = 10, RULE_rowCommand = 11, RULE_fields = 12, + RULE_field = 13, RULE_fromCommand = 14, RULE_metadata = 15, RULE_evalCommand = 16, + RULE_statsCommand = 17, RULE_inlinestatsCommand = 18, RULE_grouping = 19, + RULE_sourceIdentifier = 20, RULE_qualifiedName = 21, RULE_identifier = 22, + RULE_constant = 23, RULE_limitCommand = 24, RULE_sortCommand = 25, RULE_orderExpression = 26, + RULE_keepCommand = 27, RULE_dropCommand = 28, RULE_renameCommand = 29, + RULE_renameClause = 30, RULE_dissectCommand = 31, RULE_grokCommand = 32, + RULE_mvExpandCommand = 33, RULE_commandOptions = 34, RULE_commandOption = 35, + RULE_booleanValue = 36, RULE_numericValue = 37, RULE_decimalValue = 38, + RULE_integerValue = 39, RULE_string = 40, RULE_comparisonOperator = 41, + RULE_explainCommand = 42, RULE_subqueryExpression = 43, RULE_showCommand = 44, + RULE_enrichCommand = 45, RULE_enrichWithClause = 46; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression", - "primaryExpression", "rowCommand", "fields", "field", "fromCommand", - "metadata", "evalCommand", "statsCommand", "inlinestatsCommand", "grouping", - "sourceIdentifier", "qualifiedName", "identifier", "constant", "limitCommand", - "sortCommand", "orderExpression", "keepCommand", "dropCommand", "renameCommand", - "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", - "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", - "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "primaryExpression", "functionExpression", "rowCommand", "fields", "field", + 
"fromCommand", "metadata", "evalCommand", "statsCommand", "inlinestatsCommand", + "grouping", "sourceIdentifier", "qualifiedName", "identifier", "constant", + "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", "enrichCommand", "enrichWithClause" }; } @@ -138,7 +139,7 @@ public Vocabulary getVocabulary() { @Override public ATN getATN() { return _ATN; } - @SuppressWarnings("this-escape") public EsqlBaseParser(TokenStream input) { + public EsqlBaseParser(TokenStream input) { super(input); _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } @@ -174,9 +175,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(92); + setState(94); query(0); - setState(93); + setState(95); match(EOF); } } @@ -212,7 +213,7 @@ public QueryContext query() { public ProcessingCommandContext processingCommand() { return getRuleContext(ProcessingCommandContext.class,0); } - @SuppressWarnings("this-escape") public CompositeQueryContext(QueryContext ctx) { copyFrom(ctx); } + public CompositeQueryContext(QueryContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterCompositeQuery(this); @@ -232,7 +233,7 @@ public static class SingleCommandQueryContext extends QueryContext { public SourceCommandContext sourceCommand() { return getRuleContext(SourceCommandContext.class,0); } - @SuppressWarnings("this-escape") public SingleCommandQueryContext(QueryContext ctx) { copyFrom(ctx); } + public SingleCommandQueryContext(QueryContext ctx) { copyFrom(ctx); } @Override public 
void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterSingleCommandQuery(this); @@ -268,11 +269,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(96); + setState(98); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(103); + setState(105); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -283,16 +284,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(98); + setState(100); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(99); + setState(101); match(PIPE); - setState(100); + setState(102); processingCommand(); } } } - setState(105); + setState(107); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -346,34 +347,34 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(110); + setState(112); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(106); + setState(108); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(107); + setState(109); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(108); + setState(110); rowCommand(); } break; case SHOW: enterOuterAlt(_localctx, 4); { - setState(109); + setState(111); showCommand(); } break; @@ -456,27 +457,27 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new 
ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(125); + setState(127); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(112); + setState(114); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(113); + setState(115); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(114); + setState(116); limitCommand(); } break; @@ -484,70 +485,70 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce case PROJECT: enterOuterAlt(_localctx, 4); { - setState(115); + setState(117); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(116); + setState(118); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 6); { - setState(117); + setState(119); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - setState(118); + setState(120); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 8); { - setState(119); + setState(121); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(120); + setState(122); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(121); + setState(123); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(122); + setState(124); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(123); + setState(125); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 13); { - setState(124); + setState(126); mvExpandCommand(); } break; @@ -597,9 +598,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(127); + setState(129); match(WHERE); - setState(128); + setState(130); booleanExpression(0); } } @@ -632,7 +633,7 @@ public static class LogicalNotContext extends BooleanExpressionContext { public 
BooleanExpressionContext booleanExpression() { return getRuleContext(BooleanExpressionContext.class,0); } - @SuppressWarnings("this-escape") public LogicalNotContext(BooleanExpressionContext ctx) { copyFrom(ctx); } + public LogicalNotContext(BooleanExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterLogicalNot(this); @@ -652,7 +653,7 @@ public static class BooleanDefaultContext extends BooleanExpressionContext { public ValueExpressionContext valueExpression() { return getRuleContext(ValueExpressionContext.class,0); } - @SuppressWarnings("this-escape") public BooleanDefaultContext(BooleanExpressionContext ctx) { copyFrom(ctx); } + public BooleanDefaultContext(BooleanExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterBooleanDefault(this); @@ -675,7 +676,7 @@ public ValueExpressionContext valueExpression() { public TerminalNode IS() { return getToken(EsqlBaseParser.IS, 0); } public TerminalNode NULL() { return getToken(EsqlBaseParser.NULL, 0); } public TerminalNode NOT() { return getToken(EsqlBaseParser.NOT, 0); } - @SuppressWarnings("this-escape") public IsNullContext(BooleanExpressionContext ctx) { copyFrom(ctx); } + public IsNullContext(BooleanExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIsNull(this); @@ -695,7 +696,7 @@ public static class RegexExpressionContext extends BooleanExpressionContext { public RegexBooleanExpressionContext regexBooleanExpression() { return getRuleContext(RegexBooleanExpressionContext.class,0); } - @SuppressWarnings("this-escape") public RegexExpressionContext(BooleanExpressionContext ctx) { copyFrom(ctx); } + 
public RegexExpressionContext(BooleanExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterRegexExpression(this); @@ -726,7 +727,7 @@ public ValueExpressionContext valueExpression(int i) { public TerminalNode COMMA(int i) { return getToken(EsqlBaseParser.COMMA, i); } - @SuppressWarnings("this-escape") public LogicalInContext(BooleanExpressionContext ctx) { copyFrom(ctx); } + public LogicalInContext(BooleanExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterLogicalIn(this); @@ -754,7 +755,7 @@ public BooleanExpressionContext booleanExpression(int i) { } public TerminalNode AND() { return getToken(EsqlBaseParser.AND, 0); } public TerminalNode OR() { return getToken(EsqlBaseParser.OR, 0); } - @SuppressWarnings("this-escape") public LogicalBinaryContext(BooleanExpressionContext ctx) { copyFrom(ctx); } + public LogicalBinaryContext(BooleanExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterLogicalBinary(this); @@ -786,7 +787,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(158); + setState(160); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -795,9 +796,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(131); + setState(133); match(NOT); - setState(132); + setState(134); booleanExpression(7); } break; @@ -806,7 +807,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new 
BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(133); + setState(135); valueExpression(); } break; @@ -815,7 +816,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(134); + setState(136); regexBooleanExpression(); } break; @@ -824,41 +825,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(135); - valueExpression(); setState(137); + valueExpression(); + setState(139); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(136); + setState(138); match(NOT); } } - setState(139); + setState(141); match(IN); - setState(140); + setState(142); match(LP); - setState(141); + setState(143); valueExpression(); - setState(146); + setState(148); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(142); + setState(144); match(COMMA); - setState(143); + setState(145); valueExpression(); } } - setState(148); + setState(150); _errHandler.sync(this); _la = _input.LA(1); } - setState(149); + setState(151); match(RP); } break; @@ -867,27 +868,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(151); + setState(153); valueExpression(); - setState(152); - match(IS); setState(154); + match(IS); + setState(156); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(153); + setState(155); match(NOT); } } - setState(156); + setState(158); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(168); + setState(170); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -895,7 +896,7 @@ 
private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(166); + setState(168); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -903,11 +904,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(160); + setState(162); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(161); + setState(163); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(162); + setState(164); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -916,18 +917,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(163); + setState(165); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(164); + setState(166); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(165); + setState(167); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(170); + setState(172); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -981,48 +982,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(185); + setState(187); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: 
enterOuterAlt(_localctx, 1); { - setState(171); - valueExpression(); setState(173); + valueExpression(); + setState(175); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(172); + setState(174); match(NOT); } } - setState(175); + setState(177); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(176); + setState(178); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(178); - valueExpression(); setState(180); + valueExpression(); + setState(182); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(179); + setState(181); match(NOT); } } - setState(182); + setState(184); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(183); + setState(185); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1056,7 +1057,7 @@ public static class ValueExpressionDefaultContext extends ValueExpressionContext public OperatorExpressionContext operatorExpression() { return getRuleContext(OperatorExpressionContext.class,0); } - @SuppressWarnings("this-escape") public ValueExpressionDefaultContext(ValueExpressionContext ctx) { copyFrom(ctx); } + public ValueExpressionDefaultContext(ValueExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterValueExpressionDefault(this); @@ -1084,7 +1085,7 @@ public List operatorExpression() { public OperatorExpressionContext operatorExpression(int i) { return getRuleContext(OperatorExpressionContext.class,i); } - @SuppressWarnings("this-escape") public ComparisonContext(ValueExpressionContext ctx) { copyFrom(ctx); } + public ComparisonContext(ValueExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) 
((EsqlBaseParserListener)listener).enterComparison(this); @@ -1104,14 +1105,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(192); + setState(194); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(187); + setState(189); operatorExpression(0); } break; @@ -1119,11 +1120,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(188); + setState(190); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(189); + setState(191); comparisonOperator(); - setState(190); + setState(192); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1157,7 +1158,7 @@ public static class OperatorExpressionDefaultContext extends OperatorExpressionC public PrimaryExpressionContext primaryExpression() { return getRuleContext(PrimaryExpressionContext.class,0); } - @SuppressWarnings("this-escape") public OperatorExpressionDefaultContext(OperatorExpressionContext ctx) { copyFrom(ctx); } + public OperatorExpressionDefaultContext(OperatorExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterOperatorExpressionDefault(this); @@ -1188,7 +1189,7 @@ public OperatorExpressionContext operatorExpression(int i) { public TerminalNode PERCENT() { return getToken(EsqlBaseParser.PERCENT, 0); } public TerminalNode PLUS() { return getToken(EsqlBaseParser.PLUS, 0); } public TerminalNode MINUS() { return getToken(EsqlBaseParser.MINUS, 0); } - @SuppressWarnings("this-escape") public 
ArithmeticBinaryContext(OperatorExpressionContext ctx) { copyFrom(ctx); } + public ArithmeticBinaryContext(OperatorExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterArithmeticBinary(this); @@ -1211,7 +1212,7 @@ public OperatorExpressionContext operatorExpression() { } public TerminalNode MINUS() { return getToken(EsqlBaseParser.MINUS, 0); } public TerminalNode PLUS() { return getToken(EsqlBaseParser.PLUS, 0); } - @SuppressWarnings("this-escape") public ArithmeticUnaryContext(OperatorExpressionContext ctx) { copyFrom(ctx); } + public ArithmeticUnaryContext(OperatorExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterArithmeticUnary(this); @@ -1243,7 +1244,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(198); + setState(200); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1252,7 +1253,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(195); + setState(197); primaryExpression(); } break; @@ -1261,7 +1262,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(196); + setState(198); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1272,13 +1273,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(197); + setState(199); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); 
- setState(208); + setState(210); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1286,7 +1287,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(206); + setState(208); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1294,9 +1295,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(200); + setState(202); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(201); + setState(203); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !((((_la - 62)) & ~0x3f) == 0 && ((1L << (_la - 62)) & 7L) != 0) ) { @@ -1307,7 +1308,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(202); + setState(204); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1316,9 +1317,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(203); + setState(205); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(204); + setState(206); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) 
) { @@ -1329,14 +1330,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(205); + setState(207); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(210); + setState(212); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1370,7 +1371,7 @@ public static class DereferenceContext extends PrimaryExpressionContext { public QualifiedNameContext qualifiedName() { return getRuleContext(QualifiedNameContext.class,0); } - @SuppressWarnings("this-escape") public DereferenceContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + public DereferenceContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterDereference(this); @@ -1390,7 +1391,7 @@ public static class ConstantDefaultContext extends PrimaryExpressionContext { public ConstantContext constant() { return getRuleContext(ConstantContext.class,0); } - @SuppressWarnings("this-escape") public ConstantDefaultContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + public ConstantDefaultContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterConstantDefault(this); @@ -1412,7 +1413,7 @@ public BooleanExpressionContext booleanExpression() { return getRuleContext(BooleanExpressionContext.class,0); } public TerminalNode RP() { return getToken(EsqlBaseParser.RP, 0); } - @SuppressWarnings("this-escape") public ParenthesizedExpressionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + public ParenthesizedExpressionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof 
EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterParenthesizedExpression(this); @@ -1428,34 +1429,22 @@ public T accept(ParseTreeVisitor visitor) { } } @SuppressWarnings("CheckReturnValue") - public static class FunctionExpressionContext extends PrimaryExpressionContext { - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); - } - public TerminalNode LP() { return getToken(EsqlBaseParser.LP, 0); } - public TerminalNode RP() { return getToken(EsqlBaseParser.RP, 0); } - public List booleanExpression() { - return getRuleContexts(BooleanExpressionContext.class); + public static class FunctionContext extends PrimaryExpressionContext { + public FunctionExpressionContext functionExpression() { + return getRuleContext(FunctionExpressionContext.class,0); } - public BooleanExpressionContext booleanExpression(int i) { - return getRuleContext(BooleanExpressionContext.class,i); - } - public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(EsqlBaseParser.COMMA, i); - } - @SuppressWarnings("this-escape") public FunctionExpressionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + public FunctionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFunctionExpression(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFunction(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFunctionExpression(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFunction(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return 
((EsqlBaseParserVisitor)visitor).visitFunctionExpression(this); + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFunction(this); else return visitor.visitChildren(this); } } @@ -1463,16 +1452,15 @@ public T accept(ParseTreeVisitor visitor) { public final PrimaryExpressionContext primaryExpression() throws RecognitionException { PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); enterRule(_localctx, 18, RULE_primaryExpression); - int _la; try { - setState(231); + setState(220); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(211); + setState(213); constant(); } break; @@ -1480,60 +1468,144 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(212); + setState(214); qualifiedName(); } break; case 3: - _localctx = new ParenthesizedExpressionContext(_localctx); + _localctx = new FunctionContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(213); - match(LP); - setState(214); - booleanExpression(0); setState(215); - match(RP); + functionExpression(); } break; case 4: - _localctx = new FunctionExpressionContext(_localctx); + _localctx = new ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 4); { + setState(216); + match(LP); setState(217); - identifier(); + booleanExpression(0); setState(218); - match(LP); - setState(227); + match(RP); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class FunctionExpressionContext extends 
ParserRuleContext { + public IdentifierContext identifier() { + return getRuleContext(IdentifierContext.class,0); + } + public TerminalNode LP() { return getToken(EsqlBaseParser.LP, 0); } + public TerminalNode RP() { return getToken(EsqlBaseParser.RP, 0); } + public TerminalNode ASTERISK() { return getToken(EsqlBaseParser.ASTERISK, 0); } + public List booleanExpression() { + return getRuleContexts(BooleanExpressionContext.class); + } + public BooleanExpressionContext booleanExpression(int i) { + return getRuleContext(BooleanExpressionContext.class,i); + } + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); + } + public FunctionExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_functionExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFunctionExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFunctionExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFunctionExpression(this); + else return visitor.visitChildren(this); + } + } + + public final FunctionExpressionContext functionExpression() throws RecognitionException { + FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); + enterRule(_localctx, 20, RULE_functionExpression); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(222); + identifier(); + setState(223); + match(LP); + setState(233); + _errHandler.sync(this); + switch (_input.LA(1)) { + case ASTERISK: + { + setState(224); + match(ASTERISK); + } + 
break; + case STRING: + case INTEGER_LITERAL: + case DECIMAL_LITERAL: + case FALSE: + case LP: + case NOT: + case NULL: + case PARAM: + case TRUE: + case PLUS: + case MINUS: + case OPENING_BRACKET: + case UNQUOTED_IDENTIFIER: + case QUOTED_IDENTIFIER: + { + { + setState(225); + booleanExpression(0); + setState(230); _errHandler.sync(this); _la = _input.LA(1); - if ((((_la - 27)) & ~0x3f) == 0 && ((1L << (_la - 27)) & 3599201870855L) != 0) { + while (_la==COMMA) { + { { - setState(219); + setState(226); + match(COMMA); + setState(227); booleanExpression(0); - setState(224); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(220); - match(COMMA); - setState(221); - booleanExpression(0); - } - } - setState(226); - _errHandler.sync(this); - _la = _input.LA(1); } } + setState(232); + _errHandler.sync(this); + _la = _input.LA(1); + } } - - setState(229); - match(RP); } break; + case RP: + break; + default: + break; + } + setState(235); + match(RP); } } catch (RecognitionException re) { @@ -1574,13 +1646,13 @@ public T accept(ParseTreeVisitor visitor) { public final RowCommandContext rowCommand() throws RecognitionException { RowCommandContext _localctx = new RowCommandContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_rowCommand); + enterRule(_localctx, 22, RULE_rowCommand); try { enterOuterAlt(_localctx, 1); { - setState(233); + setState(237); match(ROW); - setState(234); + setState(238); fields(); } } @@ -1628,28 +1700,28 @@ public T accept(ParseTreeVisitor visitor) { public final FieldsContext fields() throws RecognitionException { FieldsContext _localctx = new FieldsContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_fields); + enterRule(_localctx, 24, RULE_fields); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(236); + setState(240); field(); - setState(241); + setState(245); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(237); + setState(241); match(COMMA); - setState(238); + setState(242); field(); } } } - setState(243); + setState(247); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } @@ -1696,26 +1768,26 @@ public T accept(ParseTreeVisitor visitor) { public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_field); + enterRule(_localctx, 26, RULE_field); try { - setState(249); + setState(253); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(244); + setState(248); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(245); + setState(249); qualifiedName(); - setState(246); + setState(250); match(ASSIGN); - setState(247); + setState(251); booleanExpression(0); } break; @@ -1769,39 +1841,39 @@ public T accept(ParseTreeVisitor visitor) { public final FromCommandContext fromCommand() throws RecognitionException { FromCommandContext _localctx = new FromCommandContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_fromCommand); + enterRule(_localctx, 28, RULE_fromCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(251); + setState(255); match(FROM); - setState(252); + setState(256); sourceIdentifier(); - setState(257); + setState(261); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(253); + setState(257); match(COMMA); - setState(254); + setState(258); sourceIdentifier(); } } } - setState(259); + setState(263); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } - setState(261); + setState(265); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: { - setState(260); + setState(264); metadata(); } break; @@ -1855,34 +1927,34 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_metadata); + enterRule(_localctx, 30, RULE_metadata); int _la; try { enterOuterAlt(_localctx, 1); { - setState(263); + setState(267); match(OPENING_BRACKET); - setState(264); + setState(268); match(METADATA); - setState(265); + setState(269); sourceIdentifier(); - setState(270); + setState(274); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(266); + setState(270); match(COMMA); - setState(267); + setState(271); sourceIdentifier(); } } - setState(272); + setState(276); _errHandler.sync(this); _la = _input.LA(1); } - setState(273); + setState(277); match(CLOSING_BRACKET); } } @@ -1924,13 +1996,13 @@ public T accept(ParseTreeVisitor visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_evalCommand); + enterRule(_localctx, 32, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(275); + setState(279); match(EVAL); - setState(276); + setState(280); fields(); } } @@ -1976,30 +2048,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_statsCommand); + enterRule(_localctx, 34, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(278); + setState(282); match(STATS); - setState(280); + setState(284); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(279); + setState(283); fields(); } break; } - 
setState(284); + setState(288); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: { - setState(282); + setState(286); match(BY); - setState(283); + setState(287); grouping(); } break; @@ -2048,22 +2120,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_inlinestatsCommand); + enterRule(_localctx, 36, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(286); + setState(290); match(INLINESTATS); - setState(287); + setState(291); fields(); - setState(290); + setState(294); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(288); + setState(292); match(BY); - setState(289); + setState(293); grouping(); } break; @@ -2114,28 +2186,28 @@ public T accept(ParseTreeVisitor visitor) { public final GroupingContext grouping() throws RecognitionException { GroupingContext _localctx = new GroupingContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_grouping); + enterRule(_localctx, 38, RULE_grouping); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(292); + setState(296); qualifiedName(); - setState(297); + setState(301); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(293); + setState(297); match(COMMA); - setState(294); + setState(298); qualifiedName(); } } } - setState(299); + setState(303); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } @@ -2177,12 +2249,12 @@ public T accept(ParseTreeVisitor visitor) { public final SourceIdentifierContext sourceIdentifier() throws RecognitionException { SourceIdentifierContext _localctx = new SourceIdentifierContext(_ctx, 
getState()); - enterRule(_localctx, 38, RULE_sourceIdentifier); + enterRule(_localctx, 40, RULE_sourceIdentifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(300); + setState(304); _la = _input.LA(1); if ( !(_la==SRC_UNQUOTED_IDENTIFIER || _la==SRC_QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2238,28 +2310,28 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_qualifiedName); + enterRule(_localctx, 42, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(302); + setState(306); identifier(); - setState(307); + setState(311); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(303); + setState(307); match(DOT); - setState(304); + setState(308); identifier(); } } } - setState(309); + setState(313); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -2301,12 +2373,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_identifier); + enterRule(_localctx, 44, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(310); + setState(314); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2355,7 +2427,7 @@ public BooleanValueContext booleanValue(int i) { public TerminalNode COMMA(int i) { return getToken(EsqlBaseParser.COMMA, i); } - @SuppressWarnings("this-escape") public BooleanArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + public BooleanArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); } 
@Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterBooleanArrayLiteral(this); @@ -2375,7 +2447,7 @@ public static class DecimalLiteralContext extends ConstantContext { public DecimalValueContext decimalValue() { return getRuleContext(DecimalValueContext.class,0); } - @SuppressWarnings("this-escape") public DecimalLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + public DecimalLiteralContext(ConstantContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterDecimalLiteral(this); @@ -2393,7 +2465,7 @@ public T accept(ParseTreeVisitor visitor) { @SuppressWarnings("CheckReturnValue") public static class NullLiteralContext extends ConstantContext { public TerminalNode NULL() { return getToken(EsqlBaseParser.NULL, 0); } - @SuppressWarnings("this-escape") public NullLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + public NullLiteralContext(ConstantContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterNullLiteral(this); @@ -2414,7 +2486,7 @@ public IntegerValueContext integerValue() { return getRuleContext(IntegerValueContext.class,0); } public TerminalNode UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.UNQUOTED_IDENTIFIER, 0); } - @SuppressWarnings("this-escape") public QualifiedIntegerLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + public QualifiedIntegerLiteralContext(ConstantContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterQualifiedIntegerLiteral(this); @@ -2443,7 +2515,7 @@ public StringContext string(int i) { public TerminalNode COMMA(int i) { return 
getToken(EsqlBaseParser.COMMA, i); } - @SuppressWarnings("this-escape") public StringArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + public StringArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterStringArrayLiteral(this); @@ -2463,7 +2535,7 @@ public static class StringLiteralContext extends ConstantContext { public StringContext string() { return getRuleContext(StringContext.class,0); } - @SuppressWarnings("this-escape") public StringLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + public StringLiteralContext(ConstantContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterStringLiteral(this); @@ -2492,7 +2564,7 @@ public NumericValueContext numericValue(int i) { public TerminalNode COMMA(int i) { return getToken(EsqlBaseParser.COMMA, i); } - @SuppressWarnings("this-escape") public NumericArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + public NumericArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterNumericArrayLiteral(this); @@ -2510,7 +2582,7 @@ public T accept(ParseTreeVisitor visitor) { @SuppressWarnings("CheckReturnValue") public static class InputParamContext extends ConstantContext { public TerminalNode PARAM() { return getToken(EsqlBaseParser.PARAM, 0); } - @SuppressWarnings("this-escape") public InputParamContext(ConstantContext ctx) { copyFrom(ctx); } + public InputParamContext(ConstantContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) 
((EsqlBaseParserListener)listener).enterInputParam(this); @@ -2530,7 +2602,7 @@ public static class IntegerLiteralContext extends ConstantContext { public IntegerValueContext integerValue() { return getRuleContext(IntegerValueContext.class,0); } - @SuppressWarnings("this-escape") public IntegerLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + public IntegerLiteralContext(ConstantContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIntegerLiteral(this); @@ -2550,7 +2622,7 @@ public static class BooleanLiteralContext extends ConstantContext { public BooleanValueContext booleanValue() { return getRuleContext(BooleanValueContext.class,0); } - @SuppressWarnings("this-escape") public BooleanLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + public BooleanLiteralContext(ConstantContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterBooleanLiteral(this); @@ -2568,17 +2640,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_constant); + enterRule(_localctx, 46, RULE_constant); int _la; try { - setState(354); + setState(358); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(312); + setState(316); match(NULL); } break; @@ -2586,9 +2658,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(313); + setState(317); integerValue(); - setState(314); + setState(318); match(UNQUOTED_IDENTIFIER); } 
break; @@ -2596,7 +2668,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(316); + setState(320); decimalValue(); } break; @@ -2604,7 +2676,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(317); + setState(321); integerValue(); } break; @@ -2612,7 +2684,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(318); + setState(322); booleanValue(); } break; @@ -2620,7 +2692,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(319); + setState(323); match(PARAM); } break; @@ -2628,7 +2700,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(320); + setState(324); string(); } break; @@ -2636,27 +2708,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(321); + setState(325); match(OPENING_BRACKET); - setState(322); + setState(326); numericValue(); - setState(327); + setState(331); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(323); + setState(327); match(COMMA); - setState(324); + setState(328); numericValue(); } } - setState(329); + setState(333); _errHandler.sync(this); _la = _input.LA(1); } - setState(330); + setState(334); match(CLOSING_BRACKET); } break; @@ -2664,27 +2736,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(332); + 
setState(336); match(OPENING_BRACKET); - setState(333); + setState(337); booleanValue(); - setState(338); + setState(342); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(334); + setState(338); match(COMMA); - setState(335); + setState(339); booleanValue(); } } - setState(340); + setState(344); _errHandler.sync(this); _la = _input.LA(1); } - setState(341); + setState(345); match(CLOSING_BRACKET); } break; @@ -2692,27 +2764,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(343); + setState(347); match(OPENING_BRACKET); - setState(344); + setState(348); string(); - setState(349); + setState(353); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(345); + setState(349); match(COMMA); - setState(346); + setState(350); string(); } } - setState(351); + setState(355); _errHandler.sync(this); _la = _input.LA(1); } - setState(352); + setState(356); match(CLOSING_BRACKET); } break; @@ -2754,13 +2826,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_limitCommand); + enterRule(_localctx, 48, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(356); + setState(360); match(LIMIT); - setState(357); + setState(361); match(INTEGER_LITERAL); } } @@ -2809,30 +2881,30 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_sortCommand); + enterRule(_localctx, 50, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(359); + setState(363); match(SORT); - setState(360); + setState(364); orderExpression(); - 
setState(365); + setState(369); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(361); + setState(365); match(COMMA); - setState(362); + setState(366); orderExpression(); } } } - setState(367); + setState(371); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } @@ -2882,19 +2954,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_orderExpression); + enterRule(_localctx, 52, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(368); + setState(372); booleanExpression(0); - setState(370); + setState(374); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: { - setState(369); + setState(373); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -2908,14 +2980,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(374); + setState(378); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(372); + setState(376); match(NULLS); - setState(373); + setState(377); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -2977,34 +3049,34 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_keepCommand); + enterRule(_localctx, 54, RULE_keepCommand); try { int _alt; - setState(394); + setState(398); _errHandler.sync(this); switch 
(_input.LA(1)) { case KEEP: enterOuterAlt(_localctx, 1); { - setState(376); + setState(380); match(KEEP); - setState(377); + setState(381); sourceIdentifier(); - setState(382); + setState(386); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(378); + setState(382); match(COMMA); - setState(379); + setState(383); sourceIdentifier(); } } } - setState(384); + setState(388); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3013,25 +3085,25 @@ public final KeepCommandContext keepCommand() throws RecognitionException { case PROJECT: enterOuterAlt(_localctx, 2); { - setState(385); + setState(389); match(PROJECT); - setState(386); + setState(390); sourceIdentifier(); - setState(391); + setState(395); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(387); + setState(391); match(COMMA); - setState(388); + setState(392); sourceIdentifier(); } } } - setState(393); + setState(397); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -3086,30 +3158,30 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_dropCommand); + enterRule(_localctx, 56, RULE_dropCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(396); + setState(400); match(DROP); - setState(397); + setState(401); sourceIdentifier(); - setState(402); + setState(406); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,39,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(398); + setState(402); 
match(COMMA); - setState(399); + setState(403); sourceIdentifier(); } } } - setState(404); + setState(408); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,39,_ctx); } @@ -3160,30 +3232,30 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_renameCommand); + enterRule(_localctx, 58, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(405); + setState(409); match(RENAME); - setState(406); + setState(410); renameClause(); - setState(411); + setState(415); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(407); + setState(411); match(COMMA); - setState(408); + setState(412); renameClause(); } } } - setState(413); + setState(417); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } @@ -3232,15 +3304,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_renameClause); + enterRule(_localctx, 60, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(414); + setState(418); ((RenameClauseContext)_localctx).oldName = sourceIdentifier(); - setState(415); + setState(419); match(AS); - setState(416); + setState(420); ((RenameClauseContext)_localctx).newName = sourceIdentifier(); } } @@ -3288,22 +3360,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_dissectCommand); + enterRule(_localctx, 62, 
RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(418); + setState(422); match(DISSECT); - setState(419); + setState(423); primaryExpression(); - setState(420); + setState(424); string(); - setState(422); + setState(426); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: { - setState(421); + setState(425); commandOptions(); } break; @@ -3351,15 +3423,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_grokCommand); + enterRule(_localctx, 64, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(424); + setState(428); match(GROK); - setState(425); + setState(429); primaryExpression(); - setState(426); + setState(430); string(); } } @@ -3401,13 +3473,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_mvExpandCommand); + enterRule(_localctx, 66, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(428); + setState(432); match(MV_EXPAND); - setState(429); + setState(433); sourceIdentifier(); } } @@ -3455,28 +3527,28 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_commandOptions); + enterRule(_localctx, 68, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(431); + setState(435); commandOption(); - setState(436); + setState(440); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,42,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( 
_alt==1 ) { { { - setState(432); + setState(436); match(COMMA); - setState(433); + setState(437); commandOption(); } } } - setState(438); + setState(442); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,42,_ctx); } @@ -3523,15 +3595,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_commandOption); + enterRule(_localctx, 70, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(439); + setState(443); identifier(); - setState(440); + setState(444); match(ASSIGN); - setState(441); + setState(445); constant(); } } @@ -3571,12 +3643,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_booleanValue); + enterRule(_localctx, 72, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(443); + setState(447); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -3628,22 +3700,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_numericValue); + enterRule(_localctx, 74, RULE_numericValue); try { - setState(447); + setState(451); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(445); + setState(449); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(446); + setState(450); integerValue(); } break; @@ -3686,17 +3758,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws 
RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_decimalValue); + enterRule(_localctx, 76, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(450); + setState(454); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(449); + setState(453); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3709,7 +3781,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(452); + setState(456); match(DECIMAL_LITERAL); } } @@ -3750,17 +3822,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_integerValue); + enterRule(_localctx, 78, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(455); + setState(459); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(454); + setState(458); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3773,7 +3845,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(457); + setState(461); match(INTEGER_LITERAL); } } @@ -3812,11 +3884,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_string); + enterRule(_localctx, 80, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(459); + setState(463); match(STRING); } } @@ -3860,12 +3932,12 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, 
getState()); - enterRule(_localctx, 80, RULE_comparisonOperator); + enterRule(_localctx, 82, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(461); + setState(465); _la = _input.LA(1); if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 1134907106097364992L) != 0) ) { _errHandler.recoverInline(this); @@ -3915,13 +3987,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_explainCommand); + enterRule(_localctx, 84, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(463); + setState(467); match(EXPLAIN); - setState(464); + setState(468); subqueryExpression(); } } @@ -3964,15 +4036,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_subqueryExpression); + enterRule(_localctx, 86, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(466); + setState(470); match(OPENING_BRACKET); - setState(467); + setState(471); query(0); - setState(468); + setState(472); match(CLOSING_BRACKET); } } @@ -4003,7 +4075,7 @@ public void copyFrom(ShowCommandContext ctx) { public static class ShowInfoContext extends ShowCommandContext { public TerminalNode SHOW() { return getToken(EsqlBaseParser.SHOW, 0); } public TerminalNode INFO() { return getToken(EsqlBaseParser.INFO, 0); } - @SuppressWarnings("this-escape") public ShowInfoContext(ShowCommandContext ctx) { copyFrom(ctx); } + public ShowInfoContext(ShowCommandContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterShowInfo(this); @@ -4022,7 +4094,7 @@ public 
T accept(ParseTreeVisitor visitor) { public static class ShowFunctionsContext extends ShowCommandContext { public TerminalNode SHOW() { return getToken(EsqlBaseParser.SHOW, 0); } public TerminalNode FUNCTIONS() { return getToken(EsqlBaseParser.FUNCTIONS, 0); } - @SuppressWarnings("this-escape") public ShowFunctionsContext(ShowCommandContext ctx) { copyFrom(ctx); } + public ShowFunctionsContext(ShowCommandContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterShowFunctions(this); @@ -4040,18 +4112,18 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_showCommand); + enterRule(_localctx, 88, RULE_showCommand); try { - setState(474); + setState(478); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(470); + setState(474); match(SHOW); - setState(471); + setState(475); match(INFO); } break; @@ -4059,9 +4131,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(472); + setState(476); match(SHOW); - setState(473); + setState(477); match(FUNCTIONS); } break; @@ -4122,51 +4194,51 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_enrichCommand); + enterRule(_localctx, 90, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(476); + setState(480); match(ENRICH); - setState(477); + setState(481); 
((EnrichCommandContext)_localctx).policyName = sourceIdentifier(); - setState(480); + setState(484); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(478); + setState(482); match(ON); - setState(479); + setState(483); ((EnrichCommandContext)_localctx).matchField = sourceIdentifier(); } break; } - setState(491); + setState(495); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: { - setState(482); + setState(486); match(WITH); - setState(483); + setState(487); enrichWithClause(); - setState(488); + setState(492); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,48,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(484); + setState(488); match(COMMA); - setState(485); + setState(489); enrichWithClause(); } } } - setState(490); + setState(494); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,48,_ctx); } @@ -4218,23 +4290,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_enrichWithClause); + enterRule(_localctx, 92, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(496); + setState(500); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(493); + setState(497); ((EnrichWithClauseContext)_localctx).newName = sourceIdentifier(); - setState(494); + setState(498); match(ASSIGN); } break; } - setState(498); + setState(502); ((EnrichWithClauseContext)_localctx).enrichField = sourceIdentifier(); } } @@ -4287,7 +4359,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } public static final String _serializedATN = - 
"\u0004\u0001Q\u01f5\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001Q\u01f9\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4300,316 +4372,319 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001f\b\u0001\n\u0001"+ - "\f\u0001i\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003"+ - "\u0002o\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0003\u0003~\b\u0003\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u008a\b\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u0091\b\u0005\n"+ - "\u0005\f\u0005\u0094\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0003\u0005\u009b\b\u0005\u0001\u0005\u0001\u0005\u0003"+ - "\u0005\u009f\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0005\u0005\u00a7\b\u0005\n\u0005\f\u0005\u00aa\t\u0005"+ - "\u0001\u0006\u0001\u0006\u0003\u0006\u00ae\b\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00b5\b\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0003\u0006\u00ba\b\u0006\u0001\u0007\u0001\u0007"+ - 
"\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00c1\b\u0007\u0001\b"+ - "\u0001\b\u0001\b\u0001\b\u0003\b\u00c7\b\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0005\b\u00cf\b\b\n\b\f\b\u00d2\t\b\u0001\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0005\t\u00df\b\t\n\t\f\t\u00e2\t\t\u0003\t\u00e4\b\t\u0001\t\u0001"+ - "\t\u0003\t\u00e8\b\t\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0005\u000b\u00f0\b\u000b\n\u000b\f\u000b\u00f3\t\u000b\u0001\f"+ - "\u0001\f\u0001\f\u0001\f\u0001\f\u0003\f\u00fa\b\f\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0005\r\u0100\b\r\n\r\f\r\u0103\t\r\u0001\r\u0003\r\u0106\b"+ - "\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e"+ - "\u010d\b\u000e\n\u000e\f\u000e\u0110\t\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0003\u0010\u0119"+ - "\b\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u011d\b\u0010\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0003\u0011\u0123\b\u0011\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0005\u0012\u0128\b\u0012\n\u0012\f\u0012\u012b"+ - "\t\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0005"+ - "\u0014\u0132\b\u0014\n\u0014\f\u0014\u0135\t\u0014\u0001\u0015\u0001\u0015"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0016\u0005\u0016\u0146\b\u0016\n\u0016\f\u0016\u0149\t\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005"+ - "\u0016\u0151\b\u0016\n\u0016\f\u0016\u0154\t\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u015c\b\u0016"+ - "\n\u0016\f\u0016\u015f\t\u0016\u0001\u0016\u0001\u0016\u0003\u0016\u0163"+ - "\b\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001"+ - 
"\u0018\u0001\u0018\u0005\u0018\u016c\b\u0018\n\u0018\f\u0018\u016f\t\u0018"+ - "\u0001\u0019\u0001\u0019\u0003\u0019\u0173\b\u0019\u0001\u0019\u0001\u0019"+ - "\u0003\u0019\u0177\b\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0005\u001a\u017d\b\u001a\n\u001a\f\u001a\u0180\t\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0186\b\u001a\n\u001a\f\u001a"+ - "\u0189\t\u001a\u0003\u001a\u018b\b\u001a\u0001\u001b\u0001\u001b\u0001"+ - "\u001b\u0001\u001b\u0005\u001b\u0191\b\u001b\n\u001b\f\u001b\u0194\t\u001b"+ - "\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u019a\b\u001c"+ - "\n\u001c\f\u001c\u019d\t\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001"+ - "\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0003\u001e\u01a7"+ - "\b\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001 \u0001"+ - " \u0001 \u0001!\u0001!\u0001!\u0005!\u01b3\b!\n!\f!\u01b6\t!\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0003$\u01c0\b$\u0001%"+ - "\u0003%\u01c3\b%\u0001%\u0001%\u0001&\u0003&\u01c8\b&\u0001&\u0001&\u0001"+ - "\'\u0001\'\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001"+ - "*\u0001+\u0001+\u0001+\u0001+\u0003+\u01db\b+\u0001,\u0001,\u0001,\u0001"+ - ",\u0003,\u01e1\b,\u0001,\u0001,\u0001,\u0001,\u0005,\u01e7\b,\n,\f,\u01ea"+ - "\t,\u0003,\u01ec\b,\u0001-\u0001-\u0001-\u0003-\u01f1\b-\u0001-\u0001"+ - "-\u0001-\u0000\u0003\u0002\n\u0010.\u0000\u0002\u0004\u0006\b\n\f\u000e"+ - "\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDF"+ - "HJLNPRTVXZ\u0000\b\u0001\u0000<=\u0001\u0000>@\u0001\u0000LM\u0001\u0000"+ - "CD\u0002\u0000 ##\u0001\u0000&\'\u0002\u0000%%33\u0001\u00006;\u0213"+ - "\u0000\\\u0001\u0000\u0000\u0000\u0002_\u0001\u0000\u0000\u0000\u0004"+ - "n\u0001\u0000\u0000\u0000\u0006}\u0001\u0000\u0000\u0000\b\u007f\u0001"+ - "\u0000\u0000\u0000\n\u009e\u0001\u0000\u0000\u0000\f\u00b9\u0001\u0000"+ - 
"\u0000\u0000\u000e\u00c0\u0001\u0000\u0000\u0000\u0010\u00c6\u0001\u0000"+ - "\u0000\u0000\u0012\u00e7\u0001\u0000\u0000\u0000\u0014\u00e9\u0001\u0000"+ - "\u0000\u0000\u0016\u00ec\u0001\u0000\u0000\u0000\u0018\u00f9\u0001\u0000"+ - "\u0000\u0000\u001a\u00fb\u0001\u0000\u0000\u0000\u001c\u0107\u0001\u0000"+ - "\u0000\u0000\u001e\u0113\u0001\u0000\u0000\u0000 \u0116\u0001\u0000\u0000"+ - "\u0000\"\u011e\u0001\u0000\u0000\u0000$\u0124\u0001\u0000\u0000\u0000"+ - "&\u012c\u0001\u0000\u0000\u0000(\u012e\u0001\u0000\u0000\u0000*\u0136"+ - "\u0001\u0000\u0000\u0000,\u0162\u0001\u0000\u0000\u0000.\u0164\u0001\u0000"+ - "\u0000\u00000\u0167\u0001\u0000\u0000\u00002\u0170\u0001\u0000\u0000\u0000"+ - "4\u018a\u0001\u0000\u0000\u00006\u018c\u0001\u0000\u0000\u00008\u0195"+ - "\u0001\u0000\u0000\u0000:\u019e\u0001\u0000\u0000\u0000<\u01a2\u0001\u0000"+ - "\u0000\u0000>\u01a8\u0001\u0000\u0000\u0000@\u01ac\u0001\u0000\u0000\u0000"+ - "B\u01af\u0001\u0000\u0000\u0000D\u01b7\u0001\u0000\u0000\u0000F\u01bb"+ - "\u0001\u0000\u0000\u0000H\u01bf\u0001\u0000\u0000\u0000J\u01c2\u0001\u0000"+ - "\u0000\u0000L\u01c7\u0001\u0000\u0000\u0000N\u01cb\u0001\u0000\u0000\u0000"+ - "P\u01cd\u0001\u0000\u0000\u0000R\u01cf\u0001\u0000\u0000\u0000T\u01d2"+ - "\u0001\u0000\u0000\u0000V\u01da\u0001\u0000\u0000\u0000X\u01dc\u0001\u0000"+ - "\u0000\u0000Z\u01f0\u0001\u0000\u0000\u0000\\]\u0003\u0002\u0001\u0000"+ - "]^\u0005\u0000\u0000\u0001^\u0001\u0001\u0000\u0000\u0000_`\u0006\u0001"+ - "\uffff\uffff\u0000`a\u0003\u0004\u0002\u0000ag\u0001\u0000\u0000\u0000"+ - "bc\n\u0001\u0000\u0000cd\u0005\u001a\u0000\u0000df\u0003\u0006\u0003\u0000"+ - "eb\u0001\u0000\u0000\u0000fi\u0001\u0000\u0000\u0000ge\u0001\u0000\u0000"+ - "\u0000gh\u0001\u0000\u0000\u0000h\u0003\u0001\u0000\u0000\u0000ig\u0001"+ - "\u0000\u0000\u0000jo\u0003R)\u0000ko\u0003\u001a\r\u0000lo\u0003\u0014"+ - "\n\u0000mo\u0003V+\u0000nj\u0001\u0000\u0000\u0000nk\u0001\u0000\u0000"+ - 
"\u0000nl\u0001\u0000\u0000\u0000nm\u0001\u0000\u0000\u0000o\u0005\u0001"+ - "\u0000\u0000\u0000p~\u0003\u001e\u000f\u0000q~\u0003\"\u0011\u0000r~\u0003"+ - ".\u0017\u0000s~\u00034\u001a\u0000t~\u00030\u0018\u0000u~\u0003 \u0010"+ - "\u0000v~\u0003\b\u0004\u0000w~\u00036\u001b\u0000x~\u00038\u001c\u0000"+ - "y~\u0003<\u001e\u0000z~\u0003>\u001f\u0000{~\u0003X,\u0000|~\u0003@ \u0000"+ - "}p\u0001\u0000\u0000\u0000}q\u0001\u0000\u0000\u0000}r\u0001\u0000\u0000"+ - "\u0000}s\u0001\u0000\u0000\u0000}t\u0001\u0000\u0000\u0000}u\u0001\u0000"+ - "\u0000\u0000}v\u0001\u0000\u0000\u0000}w\u0001\u0000\u0000\u0000}x\u0001"+ - "\u0000\u0000\u0000}y\u0001\u0000\u0000\u0000}z\u0001\u0000\u0000\u0000"+ - "}{\u0001\u0000\u0000\u0000}|\u0001\u0000\u0000\u0000~\u0007\u0001\u0000"+ - "\u0000\u0000\u007f\u0080\u0005\u0012\u0000\u0000\u0080\u0081\u0003\n\u0005"+ - "\u0000\u0081\t\u0001\u0000\u0000\u0000\u0082\u0083\u0006\u0005\uffff\uffff"+ - "\u0000\u0083\u0084\u0005,\u0000\u0000\u0084\u009f\u0003\n\u0005\u0007"+ - "\u0085\u009f\u0003\u000e\u0007\u0000\u0086\u009f\u0003\f\u0006\u0000\u0087"+ - "\u0089\u0003\u000e\u0007\u0000\u0088\u008a\u0005,\u0000\u0000\u0089\u0088"+ - "\u0001\u0000\u0000\u0000\u0089\u008a\u0001\u0000\u0000\u0000\u008a\u008b"+ - "\u0001\u0000\u0000\u0000\u008b\u008c\u0005)\u0000\u0000\u008c\u008d\u0005"+ - "(\u0000\u0000\u008d\u0092\u0003\u000e\u0007\u0000\u008e\u008f\u0005\""+ - "\u0000\u0000\u008f\u0091\u0003\u000e\u0007\u0000\u0090\u008e\u0001\u0000"+ - "\u0000\u0000\u0091\u0094\u0001\u0000\u0000\u0000\u0092\u0090\u0001\u0000"+ - "\u0000\u0000\u0092\u0093\u0001\u0000\u0000\u0000\u0093\u0095\u0001\u0000"+ - "\u0000\u0000\u0094\u0092\u0001\u0000\u0000\u0000\u0095\u0096\u00052\u0000"+ - "\u0000\u0096\u009f\u0001\u0000\u0000\u0000\u0097\u0098\u0003\u000e\u0007"+ - "\u0000\u0098\u009a\u0005*\u0000\u0000\u0099\u009b\u0005,\u0000\u0000\u009a"+ - "\u0099\u0001\u0000\u0000\u0000\u009a\u009b\u0001\u0000\u0000\u0000\u009b"+ - 
"\u009c\u0001\u0000\u0000\u0000\u009c\u009d\u0005-\u0000\u0000\u009d\u009f"+ - "\u0001\u0000\u0000\u0000\u009e\u0082\u0001\u0000\u0000\u0000\u009e\u0085"+ - "\u0001\u0000\u0000\u0000\u009e\u0086\u0001\u0000\u0000\u0000\u009e\u0087"+ - "\u0001\u0000\u0000\u0000\u009e\u0097\u0001\u0000\u0000\u0000\u009f\u00a8"+ - "\u0001\u0000\u0000\u0000\u00a0\u00a1\n\u0004\u0000\u0000\u00a1\u00a2\u0005"+ - "\u001f\u0000\u0000\u00a2\u00a7\u0003\n\u0005\u0005\u00a3\u00a4\n\u0003"+ - "\u0000\u0000\u00a4\u00a5\u0005/\u0000\u0000\u00a5\u00a7\u0003\n\u0005"+ - "\u0004\u00a6\u00a0\u0001\u0000\u0000\u0000\u00a6\u00a3\u0001\u0000\u0000"+ - "\u0000\u00a7\u00aa\u0001\u0000\u0000\u0000\u00a8\u00a6\u0001\u0000\u0000"+ - "\u0000\u00a8\u00a9\u0001\u0000\u0000\u0000\u00a9\u000b\u0001\u0000\u0000"+ - "\u0000\u00aa\u00a8\u0001\u0000\u0000\u0000\u00ab\u00ad\u0003\u000e\u0007"+ - "\u0000\u00ac\u00ae\u0005,\u0000\u0000\u00ad\u00ac\u0001\u0000\u0000\u0000"+ - "\u00ad\u00ae\u0001\u0000\u0000\u0000\u00ae\u00af\u0001\u0000\u0000\u0000"+ - "\u00af\u00b0\u0005+\u0000\u0000\u00b0\u00b1\u0003N\'\u0000\u00b1\u00ba"+ - "\u0001\u0000\u0000\u0000\u00b2\u00b4\u0003\u000e\u0007\u0000\u00b3\u00b5"+ - "\u0005,\u0000\u0000\u00b4\u00b3\u0001\u0000\u0000\u0000\u00b4\u00b5\u0001"+ - "\u0000\u0000\u0000\u00b5\u00b6\u0001\u0000\u0000\u0000\u00b6\u00b7\u0005"+ - "1\u0000\u0000\u00b7\u00b8\u0003N\'\u0000\u00b8\u00ba\u0001\u0000\u0000"+ - "\u0000\u00b9\u00ab\u0001\u0000\u0000\u0000\u00b9\u00b2\u0001\u0000\u0000"+ - "\u0000\u00ba\r\u0001\u0000\u0000\u0000\u00bb\u00c1\u0003\u0010\b\u0000"+ - "\u00bc\u00bd\u0003\u0010\b\u0000\u00bd\u00be\u0003P(\u0000\u00be\u00bf"+ - "\u0003\u0010\b\u0000\u00bf\u00c1\u0001\u0000\u0000\u0000\u00c0\u00bb\u0001"+ - "\u0000\u0000\u0000\u00c0\u00bc\u0001\u0000\u0000\u0000\u00c1\u000f\u0001"+ - "\u0000\u0000\u0000\u00c2\u00c3\u0006\b\uffff\uffff\u0000\u00c3\u00c7\u0003"+ - "\u0012\t\u0000\u00c4\u00c5\u0007\u0000\u0000\u0000\u00c5\u00c7\u0003\u0010"+ - 
"\b\u0003\u00c6\u00c2\u0001\u0000\u0000\u0000\u00c6\u00c4\u0001\u0000\u0000"+ - "\u0000\u00c7\u00d0\u0001\u0000\u0000\u0000\u00c8\u00c9\n\u0002\u0000\u0000"+ - "\u00c9\u00ca\u0007\u0001\u0000\u0000\u00ca\u00cf\u0003\u0010\b\u0003\u00cb"+ - "\u00cc\n\u0001\u0000\u0000\u00cc\u00cd\u0007\u0000\u0000\u0000\u00cd\u00cf"+ - "\u0003\u0010\b\u0002\u00ce\u00c8\u0001\u0000\u0000\u0000\u00ce\u00cb\u0001"+ - "\u0000\u0000\u0000\u00cf\u00d2\u0001\u0000\u0000\u0000\u00d0\u00ce\u0001"+ - "\u0000\u0000\u0000\u00d0\u00d1\u0001\u0000\u0000\u0000\u00d1\u0011\u0001"+ - "\u0000\u0000\u0000\u00d2\u00d0\u0001\u0000\u0000\u0000\u00d3\u00e8\u0003"+ - ",\u0016\u0000\u00d4\u00e8\u0003(\u0014\u0000\u00d5\u00d6\u0005(\u0000"+ - "\u0000\u00d6\u00d7\u0003\n\u0005\u0000\u00d7\u00d8\u00052\u0000\u0000"+ - "\u00d8\u00e8\u0001\u0000\u0000\u0000\u00d9\u00da\u0003*\u0015\u0000\u00da"+ - "\u00e3\u0005(\u0000\u0000\u00db\u00e0\u0003\n\u0005\u0000\u00dc\u00dd"+ - "\u0005\"\u0000\u0000\u00dd\u00df\u0003\n\u0005\u0000\u00de\u00dc\u0001"+ - "\u0000\u0000\u0000\u00df\u00e2\u0001\u0000\u0000\u0000\u00e0\u00de\u0001"+ - "\u0000\u0000\u0000\u00e0\u00e1\u0001\u0000\u0000\u0000\u00e1\u00e4\u0001"+ - "\u0000\u0000\u0000\u00e2\u00e0\u0001\u0000\u0000\u0000\u00e3\u00db\u0001"+ - "\u0000\u0000\u0000\u00e3\u00e4\u0001\u0000\u0000\u0000\u00e4\u00e5\u0001"+ - "\u0000\u0000\u0000\u00e5\u00e6\u00052\u0000\u0000\u00e6\u00e8\u0001\u0000"+ - "\u0000\u0000\u00e7\u00d3\u0001\u0000\u0000\u0000\u00e7\u00d4\u0001\u0000"+ - "\u0000\u0000\u00e7\u00d5\u0001\u0000\u0000\u0000\u00e7\u00d9\u0001\u0000"+ - "\u0000\u0000\u00e8\u0013\u0001\u0000\u0000\u0000\u00e9\u00ea\u0005\u000e"+ - "\u0000\u0000\u00ea\u00eb\u0003\u0016\u000b\u0000\u00eb\u0015\u0001\u0000"+ - "\u0000\u0000\u00ec\u00f1\u0003\u0018\f\u0000\u00ed\u00ee\u0005\"\u0000"+ - "\u0000\u00ee\u00f0\u0003\u0018\f\u0000\u00ef\u00ed\u0001\u0000\u0000\u0000"+ - "\u00f0\u00f3\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000"+ - 
"\u00f1\u00f2\u0001\u0000\u0000\u0000\u00f2\u0017\u0001\u0000\u0000\u0000"+ - "\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f4\u00fa\u0003\n\u0005\u0000\u00f5"+ - "\u00f6\u0003(\u0014\u0000\u00f6\u00f7\u0005!\u0000\u0000\u00f7\u00f8\u0003"+ - "\n\u0005\u0000\u00f8\u00fa\u0001\u0000\u0000\u0000\u00f9\u00f4\u0001\u0000"+ - "\u0000\u0000\u00f9\u00f5\u0001\u0000\u0000\u0000\u00fa\u0019\u0001\u0000"+ - "\u0000\u0000\u00fb\u00fc\u0005\u0006\u0000\u0000\u00fc\u0101\u0003&\u0013"+ - "\u0000\u00fd\u00fe\u0005\"\u0000\u0000\u00fe\u0100\u0003&\u0013\u0000"+ - "\u00ff\u00fd\u0001\u0000\u0000\u0000\u0100\u0103\u0001\u0000\u0000\u0000"+ - "\u0101\u00ff\u0001\u0000\u0000\u0000\u0101\u0102\u0001\u0000\u0000\u0000"+ - "\u0102\u0105\u0001\u0000\u0000\u0000\u0103\u0101\u0001\u0000\u0000\u0000"+ - "\u0104\u0106\u0003\u001c\u000e\u0000\u0105\u0104\u0001\u0000\u0000\u0000"+ - "\u0105\u0106\u0001\u0000\u0000\u0000\u0106\u001b\u0001\u0000\u0000\u0000"+ - "\u0107\u0108\u0005A\u0000\u0000\u0108\u0109\u0005I\u0000\u0000\u0109\u010e"+ - "\u0003&\u0013\u0000\u010a\u010b\u0005\"\u0000\u0000\u010b\u010d\u0003"+ - "&\u0013\u0000\u010c\u010a\u0001\u0000\u0000\u0000\u010d\u0110\u0001\u0000"+ - "\u0000\u0000\u010e\u010c\u0001\u0000\u0000\u0000\u010e\u010f\u0001\u0000"+ - "\u0000\u0000\u010f\u0111\u0001\u0000\u0000\u0000\u0110\u010e\u0001\u0000"+ - "\u0000\u0000\u0111\u0112\u0005B\u0000\u0000\u0112\u001d\u0001\u0000\u0000"+ - "\u0000\u0113\u0114\u0005\u0004\u0000\u0000\u0114\u0115\u0003\u0016\u000b"+ - "\u0000\u0115\u001f\u0001\u0000\u0000\u0000\u0116\u0118\u0005\u0011\u0000"+ - "\u0000\u0117\u0119\u0003\u0016\u000b\u0000\u0118\u0117\u0001\u0000\u0000"+ - "\u0000\u0118\u0119\u0001\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000"+ - "\u0000\u011a\u011b\u0005\u001e\u0000\u0000\u011b\u011d\u0003$\u0012\u0000"+ - "\u011c\u011a\u0001\u0000\u0000\u0000\u011c\u011d\u0001\u0000\u0000\u0000"+ - "\u011d!\u0001\u0000\u0000\u0000\u011e\u011f\u0005\b\u0000\u0000\u011f"+ - 
"\u0122\u0003\u0016\u000b\u0000\u0120\u0121\u0005\u001e\u0000\u0000\u0121"+ - "\u0123\u0003$\u0012\u0000\u0122\u0120\u0001\u0000\u0000\u0000\u0122\u0123"+ - "\u0001\u0000\u0000\u0000\u0123#\u0001\u0000\u0000\u0000\u0124\u0129\u0003"+ - "(\u0014\u0000\u0125\u0126\u0005\"\u0000\u0000\u0126\u0128\u0003(\u0014"+ - "\u0000\u0127\u0125\u0001\u0000\u0000\u0000\u0128\u012b\u0001\u0000\u0000"+ - "\u0000\u0129\u0127\u0001\u0000\u0000\u0000\u0129\u012a\u0001\u0000\u0000"+ - "\u0000\u012a%\u0001\u0000\u0000\u0000\u012b\u0129\u0001\u0000\u0000\u0000"+ - "\u012c\u012d\u0007\u0002\u0000\u0000\u012d\'\u0001\u0000\u0000\u0000\u012e"+ - "\u0133\u0003*\u0015\u0000\u012f\u0130\u0005$\u0000\u0000\u0130\u0132\u0003"+ - "*\u0015\u0000\u0131\u012f\u0001\u0000\u0000\u0000\u0132\u0135\u0001\u0000"+ - "\u0000\u0000\u0133\u0131\u0001\u0000\u0000\u0000\u0133\u0134\u0001\u0000"+ - "\u0000\u0000\u0134)\u0001\u0000\u0000\u0000\u0135\u0133\u0001\u0000\u0000"+ - "\u0000\u0136\u0137\u0007\u0003\u0000\u0000\u0137+\u0001\u0000\u0000\u0000"+ - "\u0138\u0163\u0005-\u0000\u0000\u0139\u013a\u0003L&\u0000\u013a\u013b"+ - "\u0005C\u0000\u0000\u013b\u0163\u0001\u0000\u0000\u0000\u013c\u0163\u0003"+ - "J%\u0000\u013d\u0163\u0003L&\u0000\u013e\u0163\u0003F#\u0000\u013f\u0163"+ - "\u00050\u0000\u0000\u0140\u0163\u0003N\'\u0000\u0141\u0142\u0005A\u0000"+ - "\u0000\u0142\u0147\u0003H$\u0000\u0143\u0144\u0005\"\u0000\u0000\u0144"+ - "\u0146\u0003H$\u0000\u0145\u0143\u0001\u0000\u0000\u0000\u0146\u0149\u0001"+ - "\u0000\u0000\u0000\u0147\u0145\u0001\u0000\u0000\u0000\u0147\u0148\u0001"+ - "\u0000\u0000\u0000\u0148\u014a\u0001\u0000\u0000\u0000\u0149\u0147\u0001"+ - "\u0000\u0000\u0000\u014a\u014b\u0005B\u0000\u0000\u014b\u0163\u0001\u0000"+ - "\u0000\u0000\u014c\u014d\u0005A\u0000\u0000\u014d\u0152\u0003F#\u0000"+ - "\u014e\u014f\u0005\"\u0000\u0000\u014f\u0151\u0003F#\u0000\u0150\u014e"+ - "\u0001\u0000\u0000\u0000\u0151\u0154\u0001\u0000\u0000\u0000\u0152\u0150"+ - 
"\u0001\u0000\u0000\u0000\u0152\u0153\u0001\u0000\u0000\u0000\u0153\u0155"+ - "\u0001\u0000\u0000\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155\u0156"+ - "\u0005B\u0000\u0000\u0156\u0163\u0001\u0000\u0000\u0000\u0157\u0158\u0005"+ - "A\u0000\u0000\u0158\u015d\u0003N\'\u0000\u0159\u015a\u0005\"\u0000\u0000"+ - "\u015a\u015c\u0003N\'\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015c"+ - "\u015f\u0001\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015d"+ - "\u015e\u0001\u0000\u0000\u0000\u015e\u0160\u0001\u0000\u0000\u0000\u015f"+ - "\u015d\u0001\u0000\u0000\u0000\u0160\u0161\u0005B\u0000\u0000\u0161\u0163"+ - "\u0001\u0000\u0000\u0000\u0162\u0138\u0001\u0000\u0000\u0000\u0162\u0139"+ - "\u0001\u0000\u0000\u0000\u0162\u013c\u0001\u0000\u0000\u0000\u0162\u013d"+ - "\u0001\u0000\u0000\u0000\u0162\u013e\u0001\u0000\u0000\u0000\u0162\u013f"+ - "\u0001\u0000\u0000\u0000\u0162\u0140\u0001\u0000\u0000\u0000\u0162\u0141"+ - "\u0001\u0000\u0000\u0000\u0162\u014c\u0001\u0000\u0000\u0000\u0162\u0157"+ - "\u0001\u0000\u0000\u0000\u0163-\u0001\u0000\u0000\u0000\u0164\u0165\u0005"+ - "\n\u0000\u0000\u0165\u0166\u0005\u001c\u0000\u0000\u0166/\u0001\u0000"+ - "\u0000\u0000\u0167\u0168\u0005\u0010\u0000\u0000\u0168\u016d\u00032\u0019"+ - "\u0000\u0169\u016a\u0005\"\u0000\u0000\u016a\u016c\u00032\u0019\u0000"+ - "\u016b\u0169\u0001\u0000\u0000\u0000\u016c\u016f\u0001\u0000\u0000\u0000"+ - "\u016d\u016b\u0001\u0000\u0000\u0000\u016d\u016e\u0001\u0000\u0000\u0000"+ - "\u016e1\u0001\u0000\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170"+ - "\u0172\u0003\n\u0005\u0000\u0171\u0173\u0007\u0004\u0000\u0000\u0172\u0171"+ - "\u0001\u0000\u0000\u0000\u0172\u0173\u0001\u0000\u0000\u0000\u0173\u0176"+ - "\u0001\u0000\u0000\u0000\u0174\u0175\u0005.\u0000\u0000\u0175\u0177\u0007"+ - "\u0005\u0000\u0000\u0176\u0174\u0001\u0000\u0000\u0000\u0176\u0177\u0001"+ - "\u0000\u0000\u0000\u01773\u0001\u0000\u0000\u0000\u0178\u0179\u0005\t"+ - 
"\u0000\u0000\u0179\u017e\u0003&\u0013\u0000\u017a\u017b\u0005\"\u0000"+ - "\u0000\u017b\u017d\u0003&\u0013\u0000\u017c\u017a\u0001\u0000\u0000\u0000"+ - "\u017d\u0180\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000"+ - "\u017e\u017f\u0001\u0000\u0000\u0000\u017f\u018b\u0001\u0000\u0000\u0000"+ - "\u0180\u017e\u0001\u0000\u0000\u0000\u0181\u0182\u0005\f\u0000\u0000\u0182"+ - "\u0187\u0003&\u0013\u0000\u0183\u0184\u0005\"\u0000\u0000\u0184\u0186"+ - "\u0003&\u0013\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0186\u0189\u0001"+ - "\u0000\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000\u0187\u0188\u0001"+ - "\u0000\u0000\u0000\u0188\u018b\u0001\u0000\u0000\u0000\u0189\u0187\u0001"+ - "\u0000\u0000\u0000\u018a\u0178\u0001\u0000\u0000\u0000\u018a\u0181\u0001"+ - "\u0000\u0000\u0000\u018b5\u0001\u0000\u0000\u0000\u018c\u018d\u0005\u0002"+ - "\u0000\u0000\u018d\u0192\u0003&\u0013\u0000\u018e\u018f\u0005\"\u0000"+ - "\u0000\u018f\u0191\u0003&\u0013\u0000\u0190\u018e\u0001\u0000\u0000\u0000"+ - "\u0191\u0194\u0001\u0000\u0000\u0000\u0192\u0190\u0001\u0000\u0000\u0000"+ - "\u0192\u0193\u0001\u0000\u0000\u0000\u01937\u0001\u0000\u0000\u0000\u0194"+ - "\u0192\u0001\u0000\u0000\u0000\u0195\u0196\u0005\r\u0000\u0000\u0196\u019b"+ - "\u0003:\u001d\u0000\u0197\u0198\u0005\"\u0000\u0000\u0198\u019a\u0003"+ - ":\u001d\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u019a\u019d\u0001\u0000"+ - "\u0000\u0000\u019b\u0199\u0001\u0000\u0000\u0000\u019b\u019c\u0001\u0000"+ - "\u0000\u0000\u019c9\u0001\u0000\u0000\u0000\u019d\u019b\u0001\u0000\u0000"+ - "\u0000\u019e\u019f\u0003&\u0013\u0000\u019f\u01a0\u0005H\u0000\u0000\u01a0"+ - "\u01a1\u0003&\u0013\u0000\u01a1;\u0001\u0000\u0000\u0000\u01a2\u01a3\u0005"+ - "\u0001\u0000\u0000\u01a3\u01a4\u0003\u0012\t\u0000\u01a4\u01a6\u0003N"+ - "\'\u0000\u01a5\u01a7\u0003B!\u0000\u01a6\u01a5\u0001\u0000\u0000\u0000"+ - "\u01a6\u01a7\u0001\u0000\u0000\u0000\u01a7=\u0001\u0000\u0000\u0000\u01a8"+ - 
"\u01a9\u0005\u0007\u0000\u0000\u01a9\u01aa\u0003\u0012\t\u0000\u01aa\u01ab"+ - "\u0003N\'\u0000\u01ab?\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005\u000b"+ - "\u0000\u0000\u01ad\u01ae\u0003&\u0013\u0000\u01aeA\u0001\u0000\u0000\u0000"+ - "\u01af\u01b4\u0003D\"\u0000\u01b0\u01b1\u0005\"\u0000\u0000\u01b1\u01b3"+ - "\u0003D\"\u0000\u01b2\u01b0\u0001\u0000\u0000\u0000\u01b3\u01b6\u0001"+ - "\u0000\u0000\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b4\u01b5\u0001"+ - "\u0000\u0000\u0000\u01b5C\u0001\u0000\u0000\u0000\u01b6\u01b4\u0001\u0000"+ - "\u0000\u0000\u01b7\u01b8\u0003*\u0015\u0000\u01b8\u01b9\u0005!\u0000\u0000"+ - "\u01b9\u01ba\u0003,\u0016\u0000\u01baE\u0001\u0000\u0000\u0000\u01bb\u01bc"+ - "\u0007\u0006\u0000\u0000\u01bcG\u0001\u0000\u0000\u0000\u01bd\u01c0\u0003"+ - "J%\u0000\u01be\u01c0\u0003L&\u0000\u01bf\u01bd\u0001\u0000\u0000\u0000"+ - "\u01bf\u01be\u0001\u0000\u0000\u0000\u01c0I\u0001\u0000\u0000\u0000\u01c1"+ - "\u01c3\u0007\u0000\u0000\u0000\u01c2\u01c1\u0001\u0000\u0000\u0000\u01c2"+ - "\u01c3\u0001\u0000\u0000\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4"+ - "\u01c5\u0005\u001d\u0000\u0000\u01c5K\u0001\u0000\u0000\u0000\u01c6\u01c8"+ - "\u0007\u0000\u0000\u0000\u01c7\u01c6\u0001\u0000\u0000\u0000\u01c7\u01c8"+ - "\u0001\u0000\u0000\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9\u01ca"+ - "\u0005\u001c\u0000\u0000\u01caM\u0001\u0000\u0000\u0000\u01cb\u01cc\u0005"+ - "\u001b\u0000\u0000\u01ccO\u0001\u0000\u0000\u0000\u01cd\u01ce\u0007\u0007"+ - "\u0000\u0000\u01ceQ\u0001\u0000\u0000\u0000\u01cf\u01d0\u0005\u0005\u0000"+ - "\u0000\u01d0\u01d1\u0003T*\u0000\u01d1S\u0001\u0000\u0000\u0000\u01d2"+ - "\u01d3\u0005A\u0000\u0000\u01d3\u01d4\u0003\u0002\u0001\u0000\u01d4\u01d5"+ - "\u0005B\u0000\u0000\u01d5U\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005\u000f"+ - "\u0000\u0000\u01d7\u01db\u00054\u0000\u0000\u01d8\u01d9\u0005\u000f\u0000"+ - "\u0000\u01d9\u01db\u00055\u0000\u0000\u01da\u01d6\u0001\u0000\u0000\u0000"+ - 
"\u01da\u01d8\u0001\u0000\u0000\u0000\u01dbW\u0001\u0000\u0000\u0000\u01dc"+ - "\u01dd\u0005\u0003\u0000\u0000\u01dd\u01e0\u0003&\u0013\u0000\u01de\u01df"+ - "\u0005J\u0000\u0000\u01df\u01e1\u0003&\u0013\u0000\u01e0\u01de\u0001\u0000"+ - "\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1\u01eb\u0001\u0000"+ - "\u0000\u0000\u01e2\u01e3\u0005K\u0000\u0000\u01e3\u01e8\u0003Z-\u0000"+ - "\u01e4\u01e5\u0005\"\u0000\u0000\u01e5\u01e7\u0003Z-\u0000\u01e6\u01e4"+ - "\u0001\u0000\u0000\u0000\u01e7\u01ea\u0001\u0000\u0000\u0000\u01e8\u01e6"+ - "\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9\u01ec"+ - "\u0001\u0000\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000\u01eb\u01e2"+ - "\u0001\u0000\u0000\u0000\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecY\u0001"+ - "\u0000\u0000\u0000\u01ed\u01ee\u0003&\u0013\u0000\u01ee\u01ef\u0005!\u0000"+ - "\u0000\u01ef\u01f1\u0001\u0000\u0000\u0000\u01f0\u01ed\u0001\u0000\u0000"+ - "\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000"+ - "\u0000\u01f2\u01f3\u0003&\u0013\u0000\u01f3[\u0001\u0000\u0000\u00003"+ - "gn}\u0089\u0092\u009a\u009e\u00a6\u00a8\u00ad\u00b4\u00b9\u00c0\u00c6"+ - "\u00ce\u00d0\u00e0\u00e3\u00e7\u00f1\u00f9\u0101\u0105\u010e\u0118\u011c"+ - "\u0122\u0129\u0133\u0147\u0152\u015d\u0162\u016d\u0172\u0176\u017e\u0187"+ - "\u018a\u0192\u019b\u01a6\u01b4\u01bf\u01c2\u01c7\u01da\u01e0\u01e8\u01eb"+ - "\u01f0"; + "-\u0007-\u0002.\u0007.\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001"+ + "h\b\u0001\n\u0001\f\u0001k\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0003\u0002q\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0080\b\u0003"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ + 
"\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u008c\b\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005"+ + "\u0093\b\u0005\n\u0005\f\u0005\u0096\t\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u009d\b\u0005\u0001\u0005\u0001"+ + "\u0005\u0003\u0005\u00a1\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00a9\b\u0005\n\u0005\f\u0005"+ + "\u00ac\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00b0\b\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00b7"+ + "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00bc\b\u0006"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007"+ + "\u00c3\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00c9\b\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005\b\u00d1\b\b\n\b\f\b\u00d4"+ + "\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00dd"+ + "\b\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u00e5\b\n"+ + "\n\n\f\n\u00e8\t\n\u0003\n\u00ea\b\n\u0001\n\u0001\n\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0005\f\u00f4\b\f\n\f\f\f\u00f7"+ + "\t\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0003\r\u00fe\b\r\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e\u0104\b\u000e\n\u000e"+ + "\f\u000e\u0107\t\u000e\u0001\u000e\u0003\u000e\u010a\b\u000e\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0111\b\u000f"+ + "\n\u000f\f\u000f\u0114\t\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0003\u0011\u011d\b\u0011\u0001"+ + "\u0011\u0001\u0011\u0003\u0011\u0121\b\u0011\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0003\u0012\u0127\b\u0012\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0005\u0013\u012c\b\u0013\n\u0013\f\u0013\u012f\t\u0013\u0001\u0014"+ + 
"\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015\u0136\b\u0015"+ + "\n\u0015\f\u0015\u0139\t\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0005"+ + "\u0017\u014a\b\u0017\n\u0017\f\u0017\u014d\t\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u0155\b\u0017"+ + "\n\u0017\f\u0017\u0158\t\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u0160\b\u0017\n\u0017\f\u0017"+ + "\u0163\t\u0017\u0001\u0017\u0001\u0017\u0003\u0017\u0167\b\u0017\u0001"+ + "\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0005\u0019\u0170\b\u0019\n\u0019\f\u0019\u0173\t\u0019\u0001\u001a"+ + "\u0001\u001a\u0003\u001a\u0177\b\u001a\u0001\u001a\u0001\u001a\u0003\u001a"+ + "\u017b\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+ + "\u0181\b\u001b\n\u001b\f\u001b\u0184\t\u001b\u0001\u001b\u0001\u001b\u0001"+ + "\u001b\u0001\u001b\u0005\u001b\u018a\b\u001b\n\u001b\f\u001b\u018d\t\u001b"+ + "\u0003\u001b\u018f\b\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0005\u001c\u0195\b\u001c\n\u001c\f\u001c\u0198\t\u001c\u0001\u001d\u0001"+ + "\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u019e\b\u001d\n\u001d\f\u001d"+ + "\u01a1\t\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0003\u001f\u01ab\b\u001f\u0001 "+ + "\u0001 \u0001 \u0001 \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0005"+ + "\"\u01b7\b\"\n\"\f\"\u01ba\t\"\u0001#\u0001#\u0001#\u0001#\u0001$\u0001"+ + "$\u0001%\u0001%\u0003%\u01c4\b%\u0001&\u0003&\u01c7\b&\u0001&\u0001&\u0001"+ + "\'\u0003\'\u01cc\b\'\u0001\'\u0001\'\u0001(\u0001(\u0001)\u0001)\u0001"+ + "*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001"+ + 
",\u0003,\u01df\b,\u0001-\u0001-\u0001-\u0001-\u0003-\u01e5\b-\u0001-\u0001"+ + "-\u0001-\u0001-\u0005-\u01eb\b-\n-\f-\u01ee\t-\u0003-\u01f0\b-\u0001."+ + "\u0001.\u0001.\u0003.\u01f5\b.\u0001.\u0001.\u0001.\u0000\u0003\u0002"+ + "\n\u0010/\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016"+ + "\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\\u0000\b\u0001"+ + "\u0000<=\u0001\u0000>@\u0001\u0000LM\u0001\u0000CD\u0002\u0000 ##\u0001"+ + "\u0000&\'\u0002\u0000%%33\u0001\u00006;\u0217\u0000^\u0001\u0000\u0000"+ + "\u0000\u0002a\u0001\u0000\u0000\u0000\u0004p\u0001\u0000\u0000\u0000\u0006"+ + "\u007f\u0001\u0000\u0000\u0000\b\u0081\u0001\u0000\u0000\u0000\n\u00a0"+ + "\u0001\u0000\u0000\u0000\f\u00bb\u0001\u0000\u0000\u0000\u000e\u00c2\u0001"+ + "\u0000\u0000\u0000\u0010\u00c8\u0001\u0000\u0000\u0000\u0012\u00dc\u0001"+ + "\u0000\u0000\u0000\u0014\u00de\u0001\u0000\u0000\u0000\u0016\u00ed\u0001"+ + "\u0000\u0000\u0000\u0018\u00f0\u0001\u0000\u0000\u0000\u001a\u00fd\u0001"+ + "\u0000\u0000\u0000\u001c\u00ff\u0001\u0000\u0000\u0000\u001e\u010b\u0001"+ + "\u0000\u0000\u0000 \u0117\u0001\u0000\u0000\u0000\"\u011a\u0001\u0000"+ + "\u0000\u0000$\u0122\u0001\u0000\u0000\u0000&\u0128\u0001\u0000\u0000\u0000"+ + "(\u0130\u0001\u0000\u0000\u0000*\u0132\u0001\u0000\u0000\u0000,\u013a"+ + "\u0001\u0000\u0000\u0000.\u0166\u0001\u0000\u0000\u00000\u0168\u0001\u0000"+ + "\u0000\u00002\u016b\u0001\u0000\u0000\u00004\u0174\u0001\u0000\u0000\u0000"+ + "6\u018e\u0001\u0000\u0000\u00008\u0190\u0001\u0000\u0000\u0000:\u0199"+ + "\u0001\u0000\u0000\u0000<\u01a2\u0001\u0000\u0000\u0000>\u01a6\u0001\u0000"+ + "\u0000\u0000@\u01ac\u0001\u0000\u0000\u0000B\u01b0\u0001\u0000\u0000\u0000"+ + "D\u01b3\u0001\u0000\u0000\u0000F\u01bb\u0001\u0000\u0000\u0000H\u01bf"+ + "\u0001\u0000\u0000\u0000J\u01c3\u0001\u0000\u0000\u0000L\u01c6\u0001\u0000"+ + "\u0000\u0000N\u01cb\u0001\u0000\u0000\u0000P\u01cf\u0001\u0000\u0000\u0000"+ + 
"R\u01d1\u0001\u0000\u0000\u0000T\u01d3\u0001\u0000\u0000\u0000V\u01d6"+ + "\u0001\u0000\u0000\u0000X\u01de\u0001\u0000\u0000\u0000Z\u01e0\u0001\u0000"+ + "\u0000\u0000\\\u01f4\u0001\u0000\u0000\u0000^_\u0003\u0002\u0001\u0000"+ + "_`\u0005\u0000\u0000\u0001`\u0001\u0001\u0000\u0000\u0000ab\u0006\u0001"+ + "\uffff\uffff\u0000bc\u0003\u0004\u0002\u0000ci\u0001\u0000\u0000\u0000"+ + "de\n\u0001\u0000\u0000ef\u0005\u001a\u0000\u0000fh\u0003\u0006\u0003\u0000"+ + "gd\u0001\u0000\u0000\u0000hk\u0001\u0000\u0000\u0000ig\u0001\u0000\u0000"+ + "\u0000ij\u0001\u0000\u0000\u0000j\u0003\u0001\u0000\u0000\u0000ki\u0001"+ + "\u0000\u0000\u0000lq\u0003T*\u0000mq\u0003\u001c\u000e\u0000nq\u0003\u0016"+ + "\u000b\u0000oq\u0003X,\u0000pl\u0001\u0000\u0000\u0000pm\u0001\u0000\u0000"+ + "\u0000pn\u0001\u0000\u0000\u0000po\u0001\u0000\u0000\u0000q\u0005\u0001"+ + "\u0000\u0000\u0000r\u0080\u0003 \u0010\u0000s\u0080\u0003$\u0012\u0000"+ + "t\u0080\u00030\u0018\u0000u\u0080\u00036\u001b\u0000v\u0080\u00032\u0019"+ + "\u0000w\u0080\u0003\"\u0011\u0000x\u0080\u0003\b\u0004\u0000y\u0080\u0003"+ + "8\u001c\u0000z\u0080\u0003:\u001d\u0000{\u0080\u0003>\u001f\u0000|\u0080"+ + "\u0003@ \u0000}\u0080\u0003Z-\u0000~\u0080\u0003B!\u0000\u007fr\u0001"+ + "\u0000\u0000\u0000\u007fs\u0001\u0000\u0000\u0000\u007ft\u0001\u0000\u0000"+ + "\u0000\u007fu\u0001\u0000\u0000\u0000\u007fv\u0001\u0000\u0000\u0000\u007f"+ + "w\u0001\u0000\u0000\u0000\u007fx\u0001\u0000\u0000\u0000\u007fy\u0001"+ + "\u0000\u0000\u0000\u007fz\u0001\u0000\u0000\u0000\u007f{\u0001\u0000\u0000"+ + "\u0000\u007f|\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000\u0000\u007f"+ + "~\u0001\u0000\u0000\u0000\u0080\u0007\u0001\u0000\u0000\u0000\u0081\u0082"+ + "\u0005\u0012\u0000\u0000\u0082\u0083\u0003\n\u0005\u0000\u0083\t\u0001"+ + "\u0000\u0000\u0000\u0084\u0085\u0006\u0005\uffff\uffff\u0000\u0085\u0086"+ + "\u0005,\u0000\u0000\u0086\u00a1\u0003\n\u0005\u0007\u0087\u00a1\u0003"+ + 
"\u000e\u0007\u0000\u0088\u00a1\u0003\f\u0006\u0000\u0089\u008b\u0003\u000e"+ + "\u0007\u0000\u008a\u008c\u0005,\u0000\u0000\u008b\u008a\u0001\u0000\u0000"+ + "\u0000\u008b\u008c\u0001\u0000\u0000\u0000\u008c\u008d\u0001\u0000\u0000"+ + "\u0000\u008d\u008e\u0005)\u0000\u0000\u008e\u008f\u0005(\u0000\u0000\u008f"+ + "\u0094\u0003\u000e\u0007\u0000\u0090\u0091\u0005\"\u0000\u0000\u0091\u0093"+ + "\u0003\u000e\u0007\u0000\u0092\u0090\u0001\u0000\u0000\u0000\u0093\u0096"+ + "\u0001\u0000\u0000\u0000\u0094\u0092\u0001\u0000\u0000\u0000\u0094\u0095"+ + "\u0001\u0000\u0000\u0000\u0095\u0097\u0001\u0000\u0000\u0000\u0096\u0094"+ + "\u0001\u0000\u0000\u0000\u0097\u0098\u00052\u0000\u0000\u0098\u00a1\u0001"+ + "\u0000\u0000\u0000\u0099\u009a\u0003\u000e\u0007\u0000\u009a\u009c\u0005"+ + "*\u0000\u0000\u009b\u009d\u0005,\u0000\u0000\u009c\u009b\u0001\u0000\u0000"+ + "\u0000\u009c\u009d\u0001\u0000\u0000\u0000\u009d\u009e\u0001\u0000\u0000"+ + "\u0000\u009e\u009f\u0005-\u0000\u0000\u009f\u00a1\u0001\u0000\u0000\u0000"+ + "\u00a0\u0084\u0001\u0000\u0000\u0000\u00a0\u0087\u0001\u0000\u0000\u0000"+ + "\u00a0\u0088\u0001\u0000\u0000\u0000\u00a0\u0089\u0001\u0000\u0000\u0000"+ + "\u00a0\u0099\u0001\u0000\u0000\u0000\u00a1\u00aa\u0001\u0000\u0000\u0000"+ + "\u00a2\u00a3\n\u0004\u0000\u0000\u00a3\u00a4\u0005\u001f\u0000\u0000\u00a4"+ + "\u00a9\u0003\n\u0005\u0005\u00a5\u00a6\n\u0003\u0000\u0000\u00a6\u00a7"+ + "\u0005/\u0000\u0000\u00a7\u00a9\u0003\n\u0005\u0004\u00a8\u00a2\u0001"+ + "\u0000\u0000\u0000\u00a8\u00a5\u0001\u0000\u0000\u0000\u00a9\u00ac\u0001"+ + "\u0000\u0000\u0000\u00aa\u00a8\u0001\u0000\u0000\u0000\u00aa\u00ab\u0001"+ + "\u0000\u0000\u0000\u00ab\u000b\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001"+ + "\u0000\u0000\u0000\u00ad\u00af\u0003\u000e\u0007\u0000\u00ae\u00b0\u0005"+ + ",\u0000\u0000\u00af\u00ae\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000"+ + "\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000\u0000\u00b1\u00b2\u0005+\u0000"+ + 
"\u0000\u00b2\u00b3\u0003P(\u0000\u00b3\u00bc\u0001\u0000\u0000\u0000\u00b4"+ + "\u00b6\u0003\u000e\u0007\u0000\u00b5\u00b7\u0005,\u0000\u0000\u00b6\u00b5"+ + "\u0001\u0000\u0000\u0000\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b8"+ + "\u0001\u0000\u0000\u0000\u00b8\u00b9\u00051\u0000\u0000\u00b9\u00ba\u0003"+ + "P(\u0000\u00ba\u00bc\u0001\u0000\u0000\u0000\u00bb\u00ad\u0001\u0000\u0000"+ + "\u0000\u00bb\u00b4\u0001\u0000\u0000\u0000\u00bc\r\u0001\u0000\u0000\u0000"+ + "\u00bd\u00c3\u0003\u0010\b\u0000\u00be\u00bf\u0003\u0010\b\u0000\u00bf"+ + "\u00c0\u0003R)\u0000\u00c0\u00c1\u0003\u0010\b\u0000\u00c1\u00c3\u0001"+ + "\u0000\u0000\u0000\u00c2\u00bd\u0001\u0000\u0000\u0000\u00c2\u00be\u0001"+ + "\u0000\u0000\u0000\u00c3\u000f\u0001\u0000\u0000\u0000\u00c4\u00c5\u0006"+ + "\b\uffff\uffff\u0000\u00c5\u00c9\u0003\u0012\t\u0000\u00c6\u00c7\u0007"+ + "\u0000\u0000\u0000\u00c7\u00c9\u0003\u0010\b\u0003\u00c8\u00c4\u0001\u0000"+ + "\u0000\u0000\u00c8\u00c6\u0001\u0000\u0000\u0000\u00c9\u00d2\u0001\u0000"+ + "\u0000\u0000\u00ca\u00cb\n\u0002\u0000\u0000\u00cb\u00cc\u0007\u0001\u0000"+ + "\u0000\u00cc\u00d1\u0003\u0010\b\u0003\u00cd\u00ce\n\u0001\u0000\u0000"+ + "\u00ce\u00cf\u0007\u0000\u0000\u0000\u00cf\u00d1\u0003\u0010\b\u0002\u00d0"+ + "\u00ca\u0001\u0000\u0000\u0000\u00d0\u00cd\u0001\u0000\u0000\u0000\u00d1"+ + "\u00d4\u0001\u0000\u0000\u0000\u00d2\u00d0\u0001\u0000\u0000\u0000\u00d2"+ + "\u00d3\u0001\u0000\u0000\u0000\u00d3\u0011\u0001\u0000\u0000\u0000\u00d4"+ + "\u00d2\u0001\u0000\u0000\u0000\u00d5\u00dd\u0003.\u0017\u0000\u00d6\u00dd"+ + "\u0003*\u0015\u0000\u00d7\u00dd\u0003\u0014\n\u0000\u00d8\u00d9\u0005"+ + "(\u0000\u0000\u00d9\u00da\u0003\n\u0005\u0000\u00da\u00db\u00052\u0000"+ + "\u0000\u00db\u00dd\u0001\u0000\u0000\u0000\u00dc\u00d5\u0001\u0000\u0000"+ + "\u0000\u00dc\u00d6\u0001\u0000\u0000\u0000\u00dc\u00d7\u0001\u0000\u0000"+ + "\u0000\u00dc\u00d8\u0001\u0000\u0000\u0000\u00dd\u0013\u0001\u0000\u0000"+ + 
"\u0000\u00de\u00df\u0003,\u0016\u0000\u00df\u00e9\u0005(\u0000\u0000\u00e0"+ + "\u00ea\u0005>\u0000\u0000\u00e1\u00e6\u0003\n\u0005\u0000\u00e2\u00e3"+ + "\u0005\"\u0000\u0000\u00e3\u00e5\u0003\n\u0005\u0000\u00e4\u00e2\u0001"+ + "\u0000\u0000\u0000\u00e5\u00e8\u0001\u0000\u0000\u0000\u00e6\u00e4\u0001"+ + "\u0000\u0000\u0000\u00e6\u00e7\u0001\u0000\u0000\u0000\u00e7\u00ea\u0001"+ + "\u0000\u0000\u0000\u00e8\u00e6\u0001\u0000\u0000\u0000\u00e9\u00e0\u0001"+ + "\u0000\u0000\u0000\u00e9\u00e1\u0001\u0000\u0000\u0000\u00e9\u00ea\u0001"+ + "\u0000\u0000\u0000\u00ea\u00eb\u0001\u0000\u0000\u0000\u00eb\u00ec\u0005"+ + "2\u0000\u0000\u00ec\u0015\u0001\u0000\u0000\u0000\u00ed\u00ee\u0005\u000e"+ + "\u0000\u0000\u00ee\u00ef\u0003\u0018\f\u0000\u00ef\u0017\u0001\u0000\u0000"+ + "\u0000\u00f0\u00f5\u0003\u001a\r\u0000\u00f1\u00f2\u0005\"\u0000\u0000"+ + "\u00f2\u00f4\u0003\u001a\r\u0000\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f4"+ + "\u00f7\u0001\u0000\u0000\u0000\u00f5\u00f3\u0001\u0000\u0000\u0000\u00f5"+ + "\u00f6\u0001\u0000\u0000\u0000\u00f6\u0019\u0001\u0000\u0000\u0000\u00f7"+ + "\u00f5\u0001\u0000\u0000\u0000\u00f8\u00fe\u0003\n\u0005\u0000\u00f9\u00fa"+ + "\u0003*\u0015\u0000\u00fa\u00fb\u0005!\u0000\u0000\u00fb\u00fc\u0003\n"+ + "\u0005\u0000\u00fc\u00fe\u0001\u0000\u0000\u0000\u00fd\u00f8\u0001\u0000"+ + "\u0000\u0000\u00fd\u00f9\u0001\u0000\u0000\u0000\u00fe\u001b\u0001\u0000"+ + "\u0000\u0000\u00ff\u0100\u0005\u0006\u0000\u0000\u0100\u0105\u0003(\u0014"+ + "\u0000\u0101\u0102\u0005\"\u0000\u0000\u0102\u0104\u0003(\u0014\u0000"+ + "\u0103\u0101\u0001\u0000\u0000\u0000\u0104\u0107\u0001\u0000\u0000\u0000"+ + "\u0105\u0103\u0001\u0000\u0000\u0000\u0105\u0106\u0001\u0000\u0000\u0000"+ + "\u0106\u0109\u0001\u0000\u0000\u0000\u0107\u0105\u0001\u0000\u0000\u0000"+ + "\u0108\u010a\u0003\u001e\u000f\u0000\u0109\u0108\u0001\u0000\u0000\u0000"+ + "\u0109\u010a\u0001\u0000\u0000\u0000\u010a\u001d\u0001\u0000\u0000\u0000"+ + 
"\u010b\u010c\u0005A\u0000\u0000\u010c\u010d\u0005I\u0000\u0000\u010d\u0112"+ + "\u0003(\u0014\u0000\u010e\u010f\u0005\"\u0000\u0000\u010f\u0111\u0003"+ + "(\u0014\u0000\u0110\u010e\u0001\u0000\u0000\u0000\u0111\u0114\u0001\u0000"+ + "\u0000\u0000\u0112\u0110\u0001\u0000\u0000\u0000\u0112\u0113\u0001\u0000"+ + "\u0000\u0000\u0113\u0115\u0001\u0000\u0000\u0000\u0114\u0112\u0001\u0000"+ + "\u0000\u0000\u0115\u0116\u0005B\u0000\u0000\u0116\u001f\u0001\u0000\u0000"+ + "\u0000\u0117\u0118\u0005\u0004\u0000\u0000\u0118\u0119\u0003\u0018\f\u0000"+ + "\u0119!\u0001\u0000\u0000\u0000\u011a\u011c\u0005\u0011\u0000\u0000\u011b"+ + "\u011d\u0003\u0018\f\u0000\u011c\u011b\u0001\u0000\u0000\u0000\u011c\u011d"+ + "\u0001\u0000\u0000\u0000\u011d\u0120\u0001\u0000\u0000\u0000\u011e\u011f"+ + "\u0005\u001e\u0000\u0000\u011f\u0121\u0003&\u0013\u0000\u0120\u011e\u0001"+ + "\u0000\u0000\u0000\u0120\u0121\u0001\u0000\u0000\u0000\u0121#\u0001\u0000"+ + "\u0000\u0000\u0122\u0123\u0005\b\u0000\u0000\u0123\u0126\u0003\u0018\f"+ + "\u0000\u0124\u0125\u0005\u001e\u0000\u0000\u0125\u0127\u0003&\u0013\u0000"+ + "\u0126\u0124\u0001\u0000\u0000\u0000\u0126\u0127\u0001\u0000\u0000\u0000"+ + "\u0127%\u0001\u0000\u0000\u0000\u0128\u012d\u0003*\u0015\u0000\u0129\u012a"+ + "\u0005\"\u0000\u0000\u012a\u012c\u0003*\u0015\u0000\u012b\u0129\u0001"+ + "\u0000\u0000\u0000\u012c\u012f\u0001\u0000\u0000\u0000\u012d\u012b\u0001"+ + "\u0000\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\'\u0001\u0000"+ + "\u0000\u0000\u012f\u012d\u0001\u0000\u0000\u0000\u0130\u0131\u0007\u0002"+ + "\u0000\u0000\u0131)\u0001\u0000\u0000\u0000\u0132\u0137\u0003,\u0016\u0000"+ + "\u0133\u0134\u0005$\u0000\u0000\u0134\u0136\u0003,\u0016\u0000\u0135\u0133"+ + "\u0001\u0000\u0000\u0000\u0136\u0139\u0001\u0000\u0000\u0000\u0137\u0135"+ + "\u0001\u0000\u0000\u0000\u0137\u0138\u0001\u0000\u0000\u0000\u0138+\u0001"+ + "\u0000\u0000\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013b\u0007"+ + 
"\u0003\u0000\u0000\u013b-\u0001\u0000\u0000\u0000\u013c\u0167\u0005-\u0000"+ + "\u0000\u013d\u013e\u0003N\'\u0000\u013e\u013f\u0005C\u0000\u0000\u013f"+ + "\u0167\u0001\u0000\u0000\u0000\u0140\u0167\u0003L&\u0000\u0141\u0167\u0003"+ + "N\'\u0000\u0142\u0167\u0003H$\u0000\u0143\u0167\u00050\u0000\u0000\u0144"+ + "\u0167\u0003P(\u0000\u0145\u0146\u0005A\u0000\u0000\u0146\u014b\u0003"+ + "J%\u0000\u0147\u0148\u0005\"\u0000\u0000\u0148\u014a\u0003J%\u0000\u0149"+ + "\u0147\u0001\u0000\u0000\u0000\u014a\u014d\u0001\u0000\u0000\u0000\u014b"+ + "\u0149\u0001\u0000\u0000\u0000\u014b\u014c\u0001\u0000\u0000\u0000\u014c"+ + "\u014e\u0001\u0000\u0000\u0000\u014d\u014b\u0001\u0000\u0000\u0000\u014e"+ + "\u014f\u0005B\u0000\u0000\u014f\u0167\u0001\u0000\u0000\u0000\u0150\u0151"+ + "\u0005A\u0000\u0000\u0151\u0156\u0003H$\u0000\u0152\u0153\u0005\"\u0000"+ + "\u0000\u0153\u0155\u0003H$\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155"+ + "\u0158\u0001\u0000\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0156"+ + "\u0157\u0001\u0000\u0000\u0000\u0157\u0159\u0001\u0000\u0000\u0000\u0158"+ + "\u0156\u0001\u0000\u0000\u0000\u0159\u015a\u0005B\u0000\u0000\u015a\u0167"+ + "\u0001\u0000\u0000\u0000\u015b\u015c\u0005A\u0000\u0000\u015c\u0161\u0003"+ + "P(\u0000\u015d\u015e\u0005\"\u0000\u0000\u015e\u0160\u0003P(\u0000\u015f"+ + "\u015d\u0001\u0000\u0000\u0000\u0160\u0163\u0001\u0000\u0000\u0000\u0161"+ + "\u015f\u0001\u0000\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162"+ + "\u0164\u0001\u0000\u0000\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0164"+ + "\u0165\u0005B\u0000\u0000\u0165\u0167\u0001\u0000\u0000\u0000\u0166\u013c"+ + "\u0001\u0000\u0000\u0000\u0166\u013d\u0001\u0000\u0000\u0000\u0166\u0140"+ + "\u0001\u0000\u0000\u0000\u0166\u0141\u0001\u0000\u0000\u0000\u0166\u0142"+ + "\u0001\u0000\u0000\u0000\u0166\u0143\u0001\u0000\u0000\u0000\u0166\u0144"+ + "\u0001\u0000\u0000\u0000\u0166\u0145\u0001\u0000\u0000\u0000\u0166\u0150"+ + 
"\u0001\u0000\u0000\u0000\u0166\u015b\u0001\u0000\u0000\u0000\u0167/\u0001"+ + "\u0000\u0000\u0000\u0168\u0169\u0005\n\u0000\u0000\u0169\u016a\u0005\u001c"+ + "\u0000\u0000\u016a1\u0001\u0000\u0000\u0000\u016b\u016c\u0005\u0010\u0000"+ + "\u0000\u016c\u0171\u00034\u001a\u0000\u016d\u016e\u0005\"\u0000\u0000"+ + "\u016e\u0170\u00034\u001a\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170"+ + "\u0173\u0001\u0000\u0000\u0000\u0171\u016f\u0001\u0000\u0000\u0000\u0171"+ + "\u0172\u0001\u0000\u0000\u0000\u01723\u0001\u0000\u0000\u0000\u0173\u0171"+ + "\u0001\u0000\u0000\u0000\u0174\u0176\u0003\n\u0005\u0000\u0175\u0177\u0007"+ + "\u0004\u0000\u0000\u0176\u0175\u0001\u0000\u0000\u0000\u0176\u0177\u0001"+ + "\u0000\u0000\u0000\u0177\u017a\u0001\u0000\u0000\u0000\u0178\u0179\u0005"+ + ".\u0000\u0000\u0179\u017b\u0007\u0005\u0000\u0000\u017a\u0178\u0001\u0000"+ + "\u0000\u0000\u017a\u017b\u0001\u0000\u0000\u0000\u017b5\u0001\u0000\u0000"+ + "\u0000\u017c\u017d\u0005\t\u0000\u0000\u017d\u0182\u0003(\u0014\u0000"+ + "\u017e\u017f\u0005\"\u0000\u0000\u017f\u0181\u0003(\u0014\u0000\u0180"+ + "\u017e\u0001\u0000\u0000\u0000\u0181\u0184\u0001\u0000\u0000\u0000\u0182"+ + "\u0180\u0001\u0000\u0000\u0000\u0182\u0183\u0001\u0000\u0000\u0000\u0183"+ + "\u018f\u0001\u0000\u0000\u0000\u0184\u0182\u0001\u0000\u0000\u0000\u0185"+ + "\u0186\u0005\f\u0000\u0000\u0186\u018b\u0003(\u0014\u0000\u0187\u0188"+ + "\u0005\"\u0000\u0000\u0188\u018a\u0003(\u0014\u0000\u0189\u0187\u0001"+ + "\u0000\u0000\u0000\u018a\u018d\u0001\u0000\u0000\u0000\u018b\u0189\u0001"+ + "\u0000\u0000\u0000\u018b\u018c\u0001\u0000\u0000\u0000\u018c\u018f\u0001"+ + "\u0000\u0000\u0000\u018d\u018b\u0001\u0000\u0000\u0000\u018e\u017c\u0001"+ + "\u0000\u0000\u0000\u018e\u0185\u0001\u0000\u0000\u0000\u018f7\u0001\u0000"+ + "\u0000\u0000\u0190\u0191\u0005\u0002\u0000\u0000\u0191\u0196\u0003(\u0014"+ + "\u0000\u0192\u0193\u0005\"\u0000\u0000\u0193\u0195\u0003(\u0014\u0000"+ + 
"\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0198\u0001\u0000\u0000\u0000"+ + "\u0196\u0194\u0001\u0000\u0000\u0000\u0196\u0197\u0001\u0000\u0000\u0000"+ + "\u01979\u0001\u0000\u0000\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199"+ + "\u019a\u0005\r\u0000\u0000\u019a\u019f\u0003<\u001e\u0000\u019b\u019c"+ + "\u0005\"\u0000\u0000\u019c\u019e\u0003<\u001e\u0000\u019d\u019b\u0001"+ + "\u0000\u0000\u0000\u019e\u01a1\u0001\u0000\u0000\u0000\u019f\u019d\u0001"+ + "\u0000\u0000\u0000\u019f\u01a0\u0001\u0000\u0000\u0000\u01a0;\u0001\u0000"+ + "\u0000\u0000\u01a1\u019f\u0001\u0000\u0000\u0000\u01a2\u01a3\u0003(\u0014"+ + "\u0000\u01a3\u01a4\u0005H\u0000\u0000\u01a4\u01a5\u0003(\u0014\u0000\u01a5"+ + "=\u0001\u0000\u0000\u0000\u01a6\u01a7\u0005\u0001\u0000\u0000\u01a7\u01a8"+ + "\u0003\u0012\t\u0000\u01a8\u01aa\u0003P(\u0000\u01a9\u01ab\u0003D\"\u0000"+ + "\u01aa\u01a9\u0001\u0000\u0000\u0000\u01aa\u01ab\u0001\u0000\u0000\u0000"+ + "\u01ab?\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005\u0007\u0000\u0000\u01ad"+ + "\u01ae\u0003\u0012\t\u0000\u01ae\u01af\u0003P(\u0000\u01afA\u0001\u0000"+ + "\u0000\u0000\u01b0\u01b1\u0005\u000b\u0000\u0000\u01b1\u01b2\u0003(\u0014"+ + "\u0000\u01b2C\u0001\u0000\u0000\u0000\u01b3\u01b8\u0003F#\u0000\u01b4"+ + "\u01b5\u0005\"\u0000\u0000\u01b5\u01b7\u0003F#\u0000\u01b6\u01b4\u0001"+ + "\u0000\u0000\u0000\u01b7\u01ba\u0001\u0000\u0000\u0000\u01b8\u01b6\u0001"+ + "\u0000\u0000\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000\u01b9E\u0001\u0000"+ + "\u0000\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01bc\u0003,\u0016"+ + "\u0000\u01bc\u01bd\u0005!\u0000\u0000\u01bd\u01be\u0003.\u0017\u0000\u01be"+ + "G\u0001\u0000\u0000\u0000\u01bf\u01c0\u0007\u0006\u0000\u0000\u01c0I\u0001"+ + "\u0000\u0000\u0000\u01c1\u01c4\u0003L&\u0000\u01c2\u01c4\u0003N\'\u0000"+ + "\u01c3\u01c1\u0001\u0000\u0000\u0000\u01c3\u01c2\u0001\u0000\u0000\u0000"+ + "\u01c4K\u0001\u0000\u0000\u0000\u01c5\u01c7\u0007\u0000\u0000\u0000\u01c6"+ + 
"\u01c5\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7"+ + "\u01c8\u0001\u0000\u0000\u0000\u01c8\u01c9\u0005\u001d\u0000\u0000\u01c9"+ + "M\u0001\u0000\u0000\u0000\u01ca\u01cc\u0007\u0000\u0000\u0000\u01cb\u01ca"+ + "\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01cc\u01cd"+ + "\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005\u001c\u0000\u0000\u01ceO\u0001"+ + "\u0000\u0000\u0000\u01cf\u01d0\u0005\u001b\u0000\u0000\u01d0Q\u0001\u0000"+ + "\u0000\u0000\u01d1\u01d2\u0007\u0007\u0000\u0000\u01d2S\u0001\u0000\u0000"+ + "\u0000\u01d3\u01d4\u0005\u0005\u0000\u0000\u01d4\u01d5\u0003V+\u0000\u01d5"+ + "U\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005A\u0000\u0000\u01d7\u01d8\u0003"+ + "\u0002\u0001\u0000\u01d8\u01d9\u0005B\u0000\u0000\u01d9W\u0001\u0000\u0000"+ + "\u0000\u01da\u01db\u0005\u000f\u0000\u0000\u01db\u01df\u00054\u0000\u0000"+ + "\u01dc\u01dd\u0005\u000f\u0000\u0000\u01dd\u01df\u00055\u0000\u0000\u01de"+ + "\u01da\u0001\u0000\u0000\u0000\u01de\u01dc\u0001\u0000\u0000\u0000\u01df"+ + "Y\u0001\u0000\u0000\u0000\u01e0\u01e1\u0005\u0003\u0000\u0000\u01e1\u01e4"+ + "\u0003(\u0014\u0000\u01e2\u01e3\u0005J\u0000\u0000\u01e3\u01e5\u0003("+ + "\u0014\u0000\u01e4\u01e2\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000"+ + "\u0000\u0000\u01e5\u01ef\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005K\u0000"+ + "\u0000\u01e7\u01ec\u0003\\.\u0000\u01e8\u01e9\u0005\"\u0000\u0000\u01e9"+ + "\u01eb\u0003\\.\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000\u01eb\u01ee"+ + "\u0001\u0000\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed"+ + "\u0001\u0000\u0000\u0000\u01ed\u01f0\u0001\u0000\u0000\u0000\u01ee\u01ec"+ + "\u0001\u0000\u0000\u0000\u01ef\u01e6\u0001\u0000\u0000\u0000\u01ef\u01f0"+ + "\u0001\u0000\u0000\u0000\u01f0[\u0001\u0000\u0000\u0000\u01f1\u01f2\u0003"+ + "(\u0014\u0000\u01f2\u01f3\u0005!\u0000\u0000\u01f3\u01f5\u0001\u0000\u0000"+ + "\u0000\u01f4\u01f1\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000"+ + 
"\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0003(\u0014\u0000"+ + "\u01f7]\u0001\u0000\u0000\u00003ip\u007f\u008b\u0094\u009c\u00a0\u00a8"+ + "\u00aa\u00af\u00b6\u00bb\u00c2\u00c8\u00d0\u00d2\u00dc\u00e6\u00e9\u00f5"+ + "\u00fd\u0105\u0109\u0112\u011c\u0120\u0126\u012d\u0137\u014b\u0156\u0161"+ + "\u0166\u0171\u0176\u017a\u0182\u018b\u018e\u0196\u019f\u01aa\u01b8\u01c3"+ + "\u01c6\u01cb\u01de\u01e4\u01ec\u01ef\u01f4"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index ceef1b4e681a7..3137eff0b6550 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -252,6 +252,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

    The default implementation does nothing.

    */ @Override public void exitDereference(EsqlBaseParser.DereferenceContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterFunction(EsqlBaseParser.FunctionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitFunction(EsqlBaseParser.FunctionContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 48f5b33fcfec1..d7b2f359e3c83 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -152,6 +152,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitDereference(EsqlBaseParser.DereferenceContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitFunction(EsqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 04f0d6da3dbe4..dd6cdaacddbef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -237,6 +237,18 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitDereference(EsqlBaseParser.DereferenceContext ctx); + /** + * Enter a parse tree produced by the {@code function} + * labeled alternative in {@link EsqlBaseParser#primaryExpression}. + * @param ctx the parse tree + */ + void enterFunction(EsqlBaseParser.FunctionContext ctx); + /** + * Exit a parse tree produced by the {@code function} + * labeled alternative in {@link EsqlBaseParser#primaryExpression}. + * @param ctx the parse tree + */ + void exitFunction(EsqlBaseParser.FunctionContext ctx); /** * Enter a parse tree produced by the {@code parenthesizedExpression} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. @@ -250,14 +262,12 @@ public interface EsqlBaseParserListener extends ParseTreeListener { */ void exitParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx); /** - * Enter a parse tree produced by the {@code functionExpression} - * labeled alternative in {@link EsqlBaseParser#primaryExpression}. + * Enter a parse tree produced by {@link EsqlBaseParser#functionExpression}. * @param ctx the parse tree */ void enterFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx); /** - * Exit a parse tree produced by the {@code functionExpression} - * labeled alternative in {@link EsqlBaseParser#primaryExpression}. 
+ * Exit a parse tree produced by {@link EsqlBaseParser#functionExpression}. * @param ctx the parse tree */ void exitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 681de2590d575..35297f3d4f336 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -145,6 +145,13 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitDereference(EsqlBaseParser.DereferenceContext ctx); + /** + * Visit a parse tree produced by the {@code function} + * labeled alternative in {@link EsqlBaseParser#primaryExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFunction(EsqlBaseParser.FunctionContext ctx); /** * Visit a parse tree produced by the {@code parenthesizedExpression} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. @@ -153,8 +160,7 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { */ T visitParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx); /** - * Visit a parse tree produced by the {@code functionExpression} - * labeled alternative in {@link EsqlBaseParser#primaryExpression}. + * Visit a parse tree produced by {@link EsqlBaseParser#functionExpression}. 
* @param ctx the parse tree * @return the visitor result */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index aa653d36d141b..a7c8d6dd49cc7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike; import org.elasticsearch.xpack.esql.expression.Order; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -62,6 +63,7 @@ import java.util.function.BiFunction; import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.DATE_PERIOD; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.TIME_DURATION; import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; @@ -312,12 +314,15 @@ public UnresolvedAttribute visitDereference(EsqlBaseParser.DereferenceContext ct @Override public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { - return new UnresolvedFunction( - source(ctx), - visitIdentifier(ctx.identifier()), - FunctionResolutionStrategy.DEFAULT, - ctx.booleanExpression().stream().map(this::expression).toList() - ); + String name = visitIdentifier(ctx.identifier()); + List args = expressions(ctx.booleanExpression()); + if ("count".equals(EsqlFunctionRegistry.normalizeName(name))) { + 
// to simplify the registration, handle in the parser the special count cases + if (args.isEmpty() || ctx.ASTERISK() != null) { + args = singletonList(new Literal(source(ctx), "*", DataTypes.KEYWORD)); + } + } + return new UnresolvedFunction(source(ctx), name, FunctionResolutionStrategy.DEFAULT, args); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java new file mode 100644 index 0000000000000..8e65e66e3045f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java @@ -0,0 +1,128 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.physical; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.index.EsIndex; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.NodeUtils; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.Objects; + +/** + * Specialized query class for retrieving statistics about the underlying data and not the actual documents. 
+ * For that see {@link EsQueryExec} + */ +public class EsStatsQueryExec extends LeafExec implements EstimatesRowSize { + + public enum StatsType { + COUNT, + MIN, + MAX, + EXISTS; + } + + public record Stat(String name, StatsType type) {}; + + private final EsIndex index; + private final QueryBuilder query; + private final Expression limit; + private final List attrs; + private final List stats; + + public EsStatsQueryExec( + Source source, + EsIndex index, + QueryBuilder query, + Expression limit, + List attributes, + List stats + ) { + super(source); + this.index = index; + this.query = query; + this.limit = limit; + this.attrs = attributes; + this.stats = stats; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, EsStatsQueryExec::new, index, query, limit, attrs, stats); + } + + public EsIndex index() { + return index; + } + + public QueryBuilder query() { + return query; + } + + @Override + public List output() { + return attrs; + } + + public Expression limit() { + return limit; + } + + @Override + // TODO - get the estimation outside the plan so it doesn't touch the plan + public PhysicalPlan estimateRowSize(State state) { + int size; + state.add(false, attrs); + size = state.consumeAllFields(false); + return this; + } + + @Override + public int hashCode() { + return Objects.hash(index, query, limit, attrs, stats); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + EsStatsQueryExec other = (EsStatsQueryExec) obj; + return Objects.equals(index, other.index) + && Objects.equals(attrs, other.attrs) + && Objects.equals(query, other.query) + && Objects.equals(limit, other.limit) + && Objects.equals(stats, other.stats); + } + + @Override + public String nodeString() { + return nodeName() + + "[" + + index + + "], stats" + + stats + + "], query[" + + (query != null ? 
Strings.toString(query, false, true) : "") + + "]" + + NodeUtils.limitedToString(attrs) + + ", limit[" + + (limit != null ? limit.toString() : "") + + "], "; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 0e984b3b85b0b..113e4b91232ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.operator.HashAggregationOperator.HashAggregationOperatorFactory; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; @@ -35,7 +36,9 @@ import java.util.Set; import java.util.function.Consumer; -abstract class AbstractPhysicalOperationProviders implements PhysicalOperationProviders { +import static java.util.Collections.emptyList; + +public abstract class AbstractPhysicalOperationProviders implements PhysicalOperationProviders { private final AggregateMapper aggregateMapper = new AggregateMapper(); @@ -235,7 +238,30 @@ private void aggregatesToFactory( if (mode == AggregateExec.Mode.PARTIAL) { aggMode = AggregatorMode.INITIAL; // TODO: this needs to be made more reliable - use casting to blow up when dealing with expressions (e+1) - sourceAttr = List.of(Expressions.attribute(aggregateFunction.field())); + Expression field = aggregateFunction.field(); + // Only count can now 
support literals - all the other aggs should be optimized away + if (field.foldable()) { + if (aggregateFunction instanceof Count count) { + sourceAttr = emptyList(); + } else { + throw new EsqlIllegalArgumentException( + "Does not support yet aggregations over constants - [{}]", + aggregateFunction.sourceText() + ); + } + } else { + Attribute attr = Expressions.attribute(field); + // cannot determine attribute + if (attr == null) { + throw new EsqlIllegalArgumentException( + "Cannot work with target field [{}] for agg [{}]", + field.sourceText(), + aggregateFunction.sourceText() + ); + } + sourceAttr = List.of(attr); + } + } else if (mode == AggregateExec.Mode.FINAL) { aggMode = AggregatorMode.FINAL; if (grouping) { @@ -253,7 +279,9 @@ private void aggregatesToFactory( } List inputChannels = sourceAttr.stream().map(attr -> layout.get(attr.id()).channel()).toList(); - assert inputChannels != null && inputChannels.size() > 0 && inputChannels.stream().allMatch(i -> i >= 0); + if (inputChannels.size() > 0) { + assert inputChannels.size() > 0 && inputChannels.stream().allMatch(i -> i >= 0); + } if (aggregateFunction instanceof ToAggregator agg) { consumer.accept(new AggFunctionSupplierContext(agg.supplier(bigArrays, inputChannels), aggMode)); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index ac62c45d4d1f3..ce5e277deaad8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -20,6 +20,8 @@ import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import org.elasticsearch.index.mapper.NestedLookup; +import org.elasticsearch.index.query.QueryBuilder; 
+import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.search.NestedHelper; import org.elasticsearch.logging.LogManager; @@ -54,6 +56,10 @@ public EsPhysicalOperationProviders(List searchContexts) { this.searchContexts = searchContexts; } + public List searchContexts() { + return searchContexts; + } + @Override public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fieldExtractExec, PhysicalOperation source) { Layout.Builder layout = source.layout.builder(); @@ -85,12 +91,12 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi return op; } - @Override - public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, LocalExecutionPlannerContext context) { - final LuceneOperator.Factory luceneFactory; - Function querySupplier = searchContext -> { + public static Function querySupplier(QueryBuilder queryBuilder) { + final QueryBuilder qb = queryBuilder == null ? 
QueryBuilders.matchAllQuery() : queryBuilder; + + return searchContext -> { SearchExecutionContext ctx = searchContext.getSearchExecutionContext(); - Query query = ctx.toQuery(esQueryExec.query()).query(); + Query query = ctx.toQuery(qb).query(); NestedLookup nestedLookup = ctx.nestedLookup(); if (nestedLookup != NestedLookup.EMPTY) { NestedHelper nestedHelper = new NestedHelper(nestedLookup, ctx::isFieldMapped); @@ -110,6 +116,12 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, } return query; }; + } + + @Override + public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, LocalExecutionPlannerContext context) { + Function querySupplier = querySupplier(esQueryExec.query()); + final LuceneOperator.Factory luceneFactory; List sorts = esQueryExec.sorts(); List> fieldSorts = null; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 18fad8cecb014..156b93e1551c4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.planner; +import org.apache.lucene.search.Query; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.compute.Describable; @@ -15,6 +16,8 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.DataPartitioning; +import org.elasticsearch.compute.lucene.LuceneCountOperator; +import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.operator.ColumnExtractOperator; import org.elasticsearch.compute.operator.Driver; import 
org.elasticsearch.compute.operator.DriverContext; @@ -43,7 +46,7 @@ import org.elasticsearch.compute.operator.topn.TopNOperator.TopNOperatorFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; @@ -54,6 +57,7 @@ import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; +import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EvalExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; @@ -96,6 +100,7 @@ import java.util.stream.Stream; import static java.util.stream.Collectors.joining; +import static org.elasticsearch.compute.lucene.LuceneOperator.NO_LIMIT; import static org.elasticsearch.compute.operator.LimitOperator.Factory; import static org.elasticsearch.compute.operator.ProjectOperator.ProjectOperatorFactory; @@ -196,6 +201,8 @@ private PhysicalOperation plan(PhysicalPlan node, LocalExecutionPlannerContext c // source nodes else if (node instanceof EsQueryExec esQuery) { return planEsQueryNode(esQuery, context); + } else if (node instanceof EsStatsQueryExec statsQuery) { + return planEsStats(statsQuery, context); } else if (node instanceof RowExec row) { return planRow(row, context); } else if (node instanceof LocalSourceExec localSource) { @@ -224,19 +231,33 @@ private PhysicalOperation planAggregation(AggregateExec aggregate, LocalExecutio return physicalOperationProviders.groupingPhysicalOperation(aggregate, source, context); } - private PhysicalOperation 
planEsQueryNode(EsQueryExec esQuery, LocalExecutionPlannerContext context) { - if (esQuery.query() == null) { - esQuery = new EsQueryExec( - esQuery.source(), - esQuery.index(), - esQuery.output(), - new MatchAllQueryBuilder(), - esQuery.limit(), - esQuery.sorts(), - esQuery.estimatedRowSize() - ); + private PhysicalOperation planEsQueryNode(EsQueryExec esQueryExec, LocalExecutionPlannerContext context) { + return physicalOperationProviders.sourcePhysicalOperation(esQueryExec, context); + } + + private PhysicalOperation planEsStats(EsStatsQueryExec statsQuery, LocalExecutionPlannerContext context) { + if (physicalOperationProviders instanceof EsPhysicalOperationProviders == false) { + throw new EsqlIllegalArgumentException("EsStatsQuery should only occur against a Lucene backend"); } - return physicalOperationProviders.sourcePhysicalOperation(esQuery, context); + EsPhysicalOperationProviders esProvider = (EsPhysicalOperationProviders) physicalOperationProviders; + + Function querySupplier = EsPhysicalOperationProviders.querySupplier(statsQuery.query()); + + Expression limitExp = statsQuery.limit(); + int limit = limitExp != null ? 
(Integer) limitExp.fold() : NO_LIMIT; + final LuceneOperator.Factory luceneFactory = new LuceneCountOperator.Factory( + esProvider.searchContexts(), + querySupplier, + context.dataPartitioning(), + context.taskConcurrency(), + limit + ); + + Layout.Builder layout = new Layout.Builder(); + layout.append(statsQuery.outputSet()); + int instanceCount = Math.max(1, luceneFactory.taskConcurrency()); + context.driverParallelism(new DriverParallelism(DriverParallelism.Type.DATA_PARALLELISM, instanceCount)); + return PhysicalOperation.fromSource(luceneFactory, layout.build()); } private PhysicalOperation planFieldExtractNode(LocalExecutionPlannerContext context, FieldExtractExec fieldExtractExec) { @@ -318,11 +339,11 @@ private PhysicalOperation planExchange(ExchangeExec exchangeExec, LocalExecution private PhysicalOperation planExchangeSink(ExchangeSinkExec exchangeSink, LocalExecutionPlannerContext context) { Objects.requireNonNull(exchangeSinkHandler, "ExchangeSinkHandler wasn't provided"); - PhysicalOperation source = plan(exchangeSink.child(), context); + var child = exchangeSink.child(); + PhysicalOperation source = plan(child, context); - Function transformer = exchangeSink.child() instanceof AggregateExec - ? Function.identity() - : alignPageToAttributes(exchangeSink.output(), source.layout); + boolean isAgg = child instanceof AggregateExec || child instanceof EsStatsQueryExec; + Function transformer = isAgg ? 
Function.identity() : alignPageToAttributes(exchangeSink.output(), source.layout); return source.withSink(new ExchangeSinkOperatorFactory(exchangeSinkHandler::createExchangeSink, transformer), source.layout); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java deleted file mode 100644 index 1ca1d5e217f6a..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.plugin; - -import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.breaker.NoopCircuitBreaker; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.BlockFactoryParameters; - -/** A provider for sharing the given parameters with the compute engine's block factory. */ -public class EsqlBlockFactoryParams implements BlockFactoryParameters { - - static final CircuitBreaker NOOP_BREAKER = new NoopCircuitBreaker("ESQL-noop-breaker"); - - static CircuitBreaker ESQL_BREAKER; - static BigArrays ESQL_BIGARRAYS; - - static void init(BigArrays bigArrays) { - ESQL_BREAKER = bigArrays.breakerService().getBreaker("request"); - ESQL_BIGARRAYS = bigArrays; - } - - final CircuitBreaker breaker; - final BigArrays bigArrays; - - public EsqlBlockFactoryParams() { - this.breaker = ESQL_BREAKER; - this.bigArrays = ESQL_BIGARRAYS; - } - - @Override - public CircuitBreaker breaker() { - return breaker != null ? breaker : NOOP_BREAKER; - } - - @Override - public BigArrays bigArrays() { - return bigArrays != null ? 
bigArrays : BigArrays.NON_RECYCLING_INSTANCE; - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 1ff00401029cf..b9ab0f7646b96 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; @@ -37,6 +38,7 @@ import java.time.ZoneOffset; import java.util.List; import java.util.Locale; +import java.util.Objects; import java.util.concurrent.Executor; public class TransportEsqlQueryAction extends HandledTransportAction { @@ -69,8 +71,7 @@ public TransportEsqlQueryAction( this.requestExecutor = threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME); exchangeService.registerTransportHandler(transportService); this.exchangeService = exchangeService; - EsqlBlockFactoryParams.init(bigArrays); - var blockFactory = BlockFactory.getGlobalInstance(); + var blockFactory = createBlockFactory(bigArrays); this.enrichPolicyResolver = new EnrichPolicyResolver(clusterService, transportService, planExecutor.indexResolver()); this.enrichLookupService = new EnrichLookupService(clusterService, searchService, transportService, bigArrays, blockFactory); this.computeService = new ComputeService( @@ -85,6 +86,12 @@ public TransportEsqlQueryAction( this.settings = settings; } + static BlockFactory createBlockFactory(BigArrays bigArrays) { + CircuitBreaker circuitBreaker = 
bigArrays.breakerService().getBreaker("request"); + Objects.requireNonNull(circuitBreaker, "request circuit breaker wasn't set"); + return new BlockFactory(circuitBreaker, bigArrays); + } + @Override protected void doExecute(Task task, EsqlQueryRequest request, ActionListener listener) { // workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can diff --git a/x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters b/x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters deleted file mode 100644 index e397954c84cbe..0000000000000 --- a/x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License -# 2.0; you may not use this file except in compliance with the Elastic License -# 2.0. 
-# - -org.elasticsearch.xpack.esql.plugin.EsqlBlockFactoryParams diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 8a5b021addae5..a3b63e7e34c37 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -327,7 +327,7 @@ private ActualResults executePlan() throws Exception { sessionId, new CancellableTask(1, "transport", "esql", null, TaskId.EMPTY_TASK_ID, Map.of()), bigArrays, - BlockFactory.getGlobalInstance(), + BlockFactory.getNonBreakingInstance(), configuration, exchangeSource, exchangeSink, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index a190eb867ff63..6633e5ae3c0fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -187,7 +187,9 @@ public final void testEvaluate() { // TODO should we convert unsigned_long into BigDecimal so it's easier to assert? 
Object result; try (ExpressionEvaluator evaluator = evaluator(expression).get(driverContext())) { - result = toJavaObject(evaluator.eval(row(testCase.getDataValues())), 0); + try (Block.Ref ref = evaluator.eval(row(testCase.getDataValues()))) { + result = toJavaObject(ref.block(), 0); + } } assertThat(result, not(equalTo(Double.NaN))); assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); @@ -216,7 +218,9 @@ public final void testSimpleWithNulls() { // TODO replace this with nulls insert data.add(simpleData.get(b)); } } - assertSimpleWithNulls(data, eval.eval(new Page(blocks)), i); + try (Block.Ref ref = eval.eval(new Page(blocks))) { + assertSimpleWithNulls(data, ref.block(), i); + } } } } @@ -242,7 +246,9 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru futures.add(exec.submit(() -> { try (EvalOperator.ExpressionEvaluator eval = evalSupplier.get(driverContext())) { for (int c = 0; c < count; c++) { - assertThat(toJavaObject(eval.eval(page), 0), testCase.getMatcher()); + try (Block.Ref ref = eval.eval(page)) { + assertThat(toJavaObject(ref.block(), 0), testCase.getMatcher()); + } } } })); @@ -631,8 +637,9 @@ private static void writeToTempDir(String subdir, String str, String extension) */ protected DriverContext driverContext() { MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)); - breakers.add(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST)); - return new DriverContext(bigArrays.withCircuitBreaking(), BlockFactory.getGlobalInstance()); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); } @After diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 50c2e10558508..f776e1a4655d2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -87,12 +87,14 @@ protected Expression build(Source source, List args) { } public void testEvalCase() { - testCase( - caseExpr -> toJavaObject( - caseExpr.toEvaluator(child -> evaluator(child)).get(driverContext()).eval(new Page(IntBlock.newConstantBlockWith(0, 1))), - 0 - ) - ); + testCase(caseExpr -> { + try ( + EvalOperator.ExpressionEvaluator eval = caseExpr.toEvaluator(child -> evaluator(child)).get(driverContext()); + Block.Ref ref = eval.eval(new Page(IntBlock.newConstantBlockWith(0, 1))) + ) { + return toJavaObject(ref.block(), 0); + } + }); } public void testFoldCase() { @@ -146,12 +148,12 @@ public void testCaseWithIncompatibleTypes() { public void testCaseIsLazy() { Case caseExpr = caseExpr(true, 1, true, 2); - assertEquals(1, toJavaObject(caseExpr.toEvaluator(child -> { + try (Block.Ref ref = caseExpr.toEvaluator(child -> { Object value = child.fold(); if (value != null && value.equals(2)) { return dvrCtx -> new EvalOperator.ExpressionEvaluator() { @Override - public Block eval(Page page) { + public Block.Ref eval(Page page) { fail("Unexpected evaluation of 4th argument"); return null; } @@ -161,7 +163,9 @@ public void close() {} }; } return evaluator(child); - }).get(driverContext()).eval(new Page(IntBlock.newConstantBlockWith(0, 1))), 0)); + }).get(driverContext()).eval(new Page(IntBlock.newConstantBlockWith(0, 1)))) { + assertEquals(1, toJavaObject(ref.block(), 0)); + } } private static Case caseExpr(Object... 
args) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index 853fe44d12ec9..15117b4c06946 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -84,18 +85,21 @@ public void testExamples() { } private Object process(Number val) { - return toJavaObject( - evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), null)).get(driverContext()).eval(row(List.of(val))), - 0 - ); + try ( + Block.Ref ref = evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), null)).get(driverContext()).eval(row(List.of(val))) + ) { + return toJavaObject(ref.block(), 0); + } } private Object process(Number val, int decimals) { - return toJavaObject( - evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), field("decimals", DataTypes.INTEGER))).get(driverContext()) - .eval(row(List.of(val, decimals))), - 0 - ); + try ( + Block.Ref ref = evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), field("decimals", DataTypes.INTEGER))).get( + driverContext() + ).eval(row(List.of(val, decimals))) + ) { + return toJavaObject(ref.block(), 0); + } } private DataType typeOf(Number val) { diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index 714112b2db543..e15403bc52e46 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -26,6 +26,8 @@ import java.math.BigInteger; import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.function.BiFunction; import java.util.stream.DoubleStream; @@ -385,7 +387,17 @@ private static > void putInOrder(List mvData, Block.M switch (ordering) { case UNORDERED -> { } - case ASCENDING -> Collections.sort(mvData); + case DEDUPLICATED_UNORDERD -> { + var dedup = new LinkedHashSet<>(mvData); + mvData.clear(); + mvData.addAll(dedup); + } + case DEDUPLICATED_AND_SORTED_ASCENDING -> { + var dedup = new HashSet<>(mvData); + mvData.clear(); + mvData.addAll(dedup); + Collections.sort(mvData); + } default -> throw new UnsupportedOperationException("unsupported ordering [" + ordering + "]"); } } @@ -443,17 +455,19 @@ private void testBlock(boolean insertNulls) { } builder.copyFrom(oneRowBlock, 0, 1); } - Block input = builder.build(); - Block result = evaluator(buildFieldExpression(testCase)).get(driverContext()).eval(new Page(input)); - - assertThat(result.getPositionCount(), equalTo(result.getPositionCount())); - for (int p = 0; p < input.getPositionCount(); p++) { - if (input.isNull(p)) { - assertThat(result.isNull(p), equalTo(true)); - continue; + try ( + Block input = builder.build(); + Block.Ref ref = evaluator(buildFieldExpression(testCase)).get(driverContext()).eval(new 
Page(input)) + ) { + assertThat(ref.block().getPositionCount(), equalTo(ref.block().getPositionCount())); + for (int p = 0; p < input.getPositionCount(); p++) { + if (input.isNull(p)) { + assertThat(ref.block().isNull(p), equalTo(true)); + continue; + } + assertThat(ref.block().isNull(p), equalTo(false)); + assertThat(toJavaObject(ref.block(), p), testCase.getMatcher()); } - assertThat(result.isNull(p), equalTo(false)); - assertThat(toJavaObject(result, p), testCase.getMatcher()); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java index e7670c9840b91..b1f2d4f0657bb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java @@ -11,86 +11,69 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import java.util.Arrays; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; -import static 
org.hamcrest.Matchers.nullValue; -public class MvConcatTests extends AbstractScalarFunctionTestCase { +public class MvConcatTests extends AbstractFunctionTestCase { public MvConcatTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("mv_concat basic test", () -> { - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData( - List.of(new BytesRef("foo"), new BytesRef("bar"), new BytesRef("baz")), - DataTypes.KEYWORD, - "field" - ), - new TestCaseSupplier.TypedData(new BytesRef(", "), DataTypes.KEYWORD, "delim") - ), - "MvConcat[field=Attribute[channel=0], delim=Attribute[channel=1]]", - DataTypes.KEYWORD, - equalTo(new BytesRef("foo, bar, baz")) - ); - }))); + List suppliers = new ArrayList<>(); + for (DataType fieldType : EsqlDataTypes.types()) { + if (EsqlDataTypes.isString(fieldType) == false) { + continue; + } + for (DataType delimType : EsqlDataTypes.types()) { + if (EsqlDataTypes.isString(delimType) == false) { + continue; + } + for (int l = 1; l < 10; l++) { + int length = l; + suppliers.add(new TestCaseSupplier(fieldType + "/" + l + " " + delimType, List.of(fieldType, delimType), () -> { + String delim = randomAlphaOfLengthBetween(0, 5); + List data = new ArrayList<>(length); + String expected = null; + for (int i = 0; i < length; i++) { + String value = randomRealisticUnicodeOfLengthBetween(0, 10); + data.add(new BytesRef(value)); + if (expected == null) { + expected = value; + } else { + expected += delim + value; + } + } + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(data, fieldType, "field"), + new TestCaseSupplier.TypedData(new BytesRef(delim), delimType, "delim") + ), + "MvConcat[field=Attribute[channel=0], delim=Attribute[channel=1]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(expected)) + ); + })); + } + } + } + return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); } @Override protected Expression build(Source source, List args) { return new MvConcat(source, args.get(0), args.get(1)); } - - @Override - protected List argSpec() { - return List.of(required(strings()), required(strings())); - } - - @Override - protected DataType expectedType(List argTypes) { - return DataTypes.KEYWORD; - } - - public void testNull() { - // TODO: add these into the test parameters - BytesRef foo = new BytesRef("foo"); - BytesRef bar = new BytesRef("bar"); - BytesRef delim = new BytesRef(";"); - Expression expression = buildFieldExpression(testCase); - DriverContext dvrCtx = driverContext(); - - assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(Arrays.asList(foo, bar), null))), 0), nullValue()); - assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(foo, null))), 0), nullValue()); - assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(null, null))), 0), nullValue()); - - assertThat( - toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(Arrays.asList(foo, bar), Arrays.asList(delim, bar)))), 0), - nullValue() - ); - assertThat( - toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(foo, Arrays.asList(delim, bar)))), 0), - nullValue() - ); - assertThat( - toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(null, Arrays.asList(delim, bar)))), 0), - nullValue() - ); - - assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(null, delim))), 0), nullValue()); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index f6a41a042e68c..8db6b1bbd0c93 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -28,6 +28,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.function.Function; import java.util.function.Supplier; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; @@ -84,11 +85,11 @@ public void testCoalesceIsLazy() { Layout.Builder builder = new Layout.Builder(); buildLayout(builder, exp); Layout layout = builder.build(); - assertThat(toJavaObject(exp.toEvaluator(child -> { + Function map = child -> { if (child == evil) { return dvrCtx -> new EvalOperator.ExpressionEvaluator() { @Override - public Block eval(Page page) { + public Block.Ref eval(Page page) { throw new AssertionError("shouldn't be called"); } @@ -97,7 +98,13 @@ public void close() {} }; } return EvalMapper.toEvaluator(child, layout); - }).get(driverContext()).eval(row(testCase.getDataValues())), 0), testCase.getMatcher()); + }; + try ( + EvalOperator.ExpressionEvaluator eval = exp.toEvaluator(map).get(driverContext()); + Block.Ref ref = eval.eval(row(testCase.getDataValues())) + ) { + assertThat(toJavaObject(ref.block(), 0), testCase.getMatcher()); + } } public void testCoalesceNullabilityIsUnknown() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java index 3285b5848d311..caec572351675 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java @@ -11,6 +11,7 @@ import 
com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -117,8 +118,11 @@ public void testSomeConstant() { assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); return; } - try (EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(driverContext())) { - assertThat(toJavaObject(eval.eval(row(fieldValues)), 0), testCase.getMatcher()); + try ( + EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(driverContext()); + Block.Ref ref = eval.eval(row(fieldValues)) + ) { + assertThat(toJavaObject(ref.block(), 0), testCase.getMatcher()); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java index 3d9e8d677f3e4..edbfec2bc5d85 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -196,13 +197,17 @@ public void testUnicode() { } private String process(String str, int length) { - Block result = evaluator( - new Left(Source.EMPTY, field("str", DataTypes.KEYWORD), 
new Literal(Source.EMPTY, length, DataTypes.INTEGER)) - ).get(driverContext()).eval(row(List.of(new BytesRef(str)))); - if (null == result) { - return null; + try ( + EvalOperator.ExpressionEvaluator eval = evaluator( + new Left(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, DataTypes.INTEGER)) + ).get(driverContext()); + Block.Ref ref = eval.eval(row(List.of(new BytesRef(str)))) + ) { + if (ref.block().isNull(0)) { + return null; + } + BytesRef resultByteRef = ((BytesRef) toJavaObject(ref.block(), 0)); + return resultByteRef == null ? null : resultByteRef.utf8ToString(); } - BytesRef resultByteRef = ((BytesRef) toJavaObject(result, 0)); - return resultByteRef == null ? null : resultByteRef.utf8ToString(); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java new file mode 100644 index 0000000000000..06ddb220877e1 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java @@ -0,0 +1,128 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Supplier; + +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.hamcrest.Matchers.equalTo; + +public class ReplaceTests extends AbstractScalarFunctionTestCase { + public ReplaceTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + suppliers.add(new TestCaseSupplier("basic", () -> { + String text = randomAlphaOfLength(10); + String oldStr = text.substring(1, 2); + String newStr = randomAlphaOfLength(5); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), + new TestCaseSupplier.TypedData(new BytesRef(oldStr), DataTypes.KEYWORD, "oldStr"), + new TestCaseSupplier.TypedData(new BytesRef(newStr), DataTypes.KEYWORD, "newStr") + ), + "ReplaceEvaluator[str=Attribute[channel=0], regex=Attribute[channel=1], newStr=Attribute[channel=2]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(text.replaceAll(oldStr, newStr))) + ); + })); + // a syntactically wrong regex should yield null. 
And a warning header + // but for now we are letting the exception pass through. See also https://github.com/elastic/elasticsearch/issues/100038 + // suppliers.add(new TestCaseSupplier("invalid_regex", () -> { + // String text = randomAlphaOfLength(10); + // String invalidRegex = "["; + // String newStr = randomAlphaOfLength(5); + // return new TestCaseSupplier.TestCase( + // List.of( + // new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), + // new TestCaseSupplier.TypedData(new BytesRef(invalidRegex), DataTypes.KEYWORD, "oldStr"), + // new TestCaseSupplier.TypedData(new BytesRef(newStr), DataTypes.KEYWORD, "newStr") + // ), + // "ReplaceEvaluator[str=Attribute[channel=0], regex=Attribute[channel=1], newStr=Attribute[channel=2]]", + // DataTypes.KEYWORD, + // equalTo(null) + // ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + // .withWarning("java.util.regex.PatternSyntaxException: Unclosed character class near index 0\r\n[\r\n^"); + // })); + return parameterSuppliersFromTypedData(suppliers); + } + + @Override + protected DataType expectedType(List argTypes) { + return DataTypes.KEYWORD; + } + + public Matcher resultsMatcher(List typedData) { + String str = ((BytesRef) typedData.get(0).data()).utf8ToString(); + String oldStr = ((BytesRef) typedData.get(1).data()).utf8ToString(); + String newStr = ((BytesRef) typedData.get(2).data()).utf8ToString(); + return equalTo(new BytesRef(str.replaceAll(oldStr, newStr))); + } + + @Override + protected List argSpec() { + return List.of(required(strings()), required(strings()), required(strings())); + } + + @Override + protected Expression build(Source source, List args) { + return new Replace(source, args.get(0), args.get(1), args.get(2)); + } + + public void testReplaceString() { + assertThat(process("a tiger", "a", "pp"), equalTo("pp tiger")); + assertThat(process("a tiger is always a tiger", "a", "pp"), equalTo("pp tiger is pplwppys 
pp tiger")); + assertThat(process("a tiger", "ti ", ""), equalTo("a tiger")); + assertThat(process("a tiger", " ti", ""), equalTo("ager")); + } + + public void testReplaceRegex() { + assertThat(process("what a nice day", "\\s+", "-"), equalTo("what-a-nice-day")); + assertThat(process("I love cats and cats are amazing.", "\\bcats\\b", "dogs"), equalTo("I love dogs and dogs are amazing.")); + } + + public void testUnicode() { + final String s = "a\ud83c\udf09tiger"; + assertThat(process(s, "a\ud83c\udf09t", "pp"), equalTo("ppiger")); + assertThat(process(s, "\ud83c\udf09", "\ud83c\udf09\ud83c\udf09"), equalTo("a\ud83c\udf09\ud83c\udf09tiger")); + } + + private String process(String str, String oldStr, String newStr) { + List list = Arrays.asList(new BytesRef(str), new BytesRef(oldStr), new BytesRef(newStr)); + Block.Ref result = evaluator( + new Replace( + Source.EMPTY, + field("str", DataTypes.KEYWORD), + field("oldStr", DataTypes.KEYWORD), + field("newStr", DataTypes.KEYWORD) + ) + ).get(driverContext()).eval(row(list)); + BytesRef resultValue = (BytesRef) toJavaObject(result.block(), 0); + return resultValue == null ? 
null : resultValue.utf8ToString(); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java index ca9d1ef2dc1ee..04b8dd4079028 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -198,13 +199,17 @@ public void testUnicode() { } private String process(String str, int length) { - Block result = evaluator( - new Right(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, DataTypes.INTEGER)) - ).get(driverContext()).eval(row(List.of(new BytesRef(str)))); - if (null == result) { - return null; + try ( + EvalOperator.ExpressionEvaluator eval = evaluator( + new Right(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, DataTypes.INTEGER)) + ).get(driverContext()); + Block.Ref ref = eval.eval(row(List.of(new BytesRef(str)))) + ) { + if (ref.block().isNull(0)) { + return null; + } + BytesRef resultByteRef = ((BytesRef) toJavaObject(ref.block(), 0)); + return resultByteRef == null ? null : resultByteRef.utf8ToString(); } - BytesRef resultByteRef = ((BytesRef) toJavaObject(result, 0)); - return resultByteRef == null ? 
null : resultByteRef.utf8ToString(); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index 27b8ed722f963..3926cc46dd883 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; @@ -83,19 +84,21 @@ protected Expression build(Source source, List args) { } public void testConstantDelimiter() { - EvalOperator.ExpressionEvaluator eval = evaluator( - new Split(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, new BytesRef(":"), DataTypes.KEYWORD)) - ).get(driverContext()); - /* - * 58 is ascii for : and appears in the toString below. We don't convert the delimiter to a - * string because we aren't really sure it's printable. It could be a tab or a bell or some - * garbage. 
- */ - assert ':' == 58; - assertThat(eval.toString(), equalTo("SplitSingleByteEvaluator[str=Attribute[channel=0], delim=58]")); - assertThat( - toJavaObject(eval.eval(new Page(BytesRefBlock.newConstantBlockWith(new BytesRef("foo:bar"), 1))), 0), - equalTo(List.of(new BytesRef("foo"), new BytesRef("bar"))) - ); + try ( + EvalOperator.ExpressionEvaluator eval = evaluator( + new Split(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, new BytesRef(":"), DataTypes.KEYWORD)) + ).get(driverContext()) + ) { + /* + * 58 is ascii for : and appears in the toString below. We don't convert the delimiter to a + * string because we aren't really sure it's printable. It could be a tab or a bell or some + * garbage. + */ + assert ':' == 58; + assertThat(eval.toString(), equalTo("SplitSingleByteEvaluator[str=Attribute[channel=0], delim=58]")); + try (Block.Ref ref = eval.eval(new Page(BytesRefBlock.newConstantBlockWith(new BytesRef("foo:bar"), 1)))) { + assertThat(toJavaObject(ref.block(), 0), equalTo(List.of(new BytesRef("foo"), new BytesRef("bar")))); + } + } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index 5730b93aecd8d..722b2bea8060a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import 
org.elasticsearch.xpack.ql.expression.Expression; @@ -129,15 +130,19 @@ public void testNegativeLength() { } private String process(String str, int start, Integer length) { - Block result = evaluator( - new Substring( - Source.EMPTY, - field("str", DataTypes.KEYWORD), - new Literal(Source.EMPTY, start, DataTypes.INTEGER), - length == null ? null : new Literal(Source.EMPTY, length, DataTypes.INTEGER) - ) - ).get(driverContext()).eval(row(List.of(new BytesRef(str)))); - return result == null ? null : ((BytesRef) toJavaObject(result, 0)).utf8ToString(); + try ( + EvalOperator.ExpressionEvaluator eval = evaluator( + new Substring( + Source.EMPTY, + field("str", DataTypes.KEYWORD), + new Literal(Source.EMPTY, start, DataTypes.INTEGER), + length == null ? null : new Literal(Source.EMPTY, length, DataTypes.INTEGER) + ) + ).get(driverContext()); + Block.Ref ref = eval.eval(row(List.of(new BytesRef(str)))) + ) { + return ref.block().isNull(0) ? null : ((BytesRef) toJavaObject(ref.block(), 0)).utf8ToString(); + } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java index 3d5e7820677e4..d1c1297cd4660 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -80,6 +81,7 @@ protected boolean 
supportsTypes(DataType lhsType, DataType rhsType) { } public final void testApplyToAllTypes() { + // TODO replace with test cases for (DataType lhsType : EsqlDataTypes.types()) { for (DataType rhsType : EsqlDataTypes.types()) { if (supportsTypes(lhsType, rhsType) == false) { @@ -92,7 +94,9 @@ public final void testApplyToAllTypes() { Source src = new Source(Location.EMPTY, lhsType.typeName() + " " + rhsType.typeName()); if (isRepresentable(lhsType) && isRepresentable(rhsType)) { op = build(src, field("lhs", lhsType), field("rhs", rhsType)); - result = toJavaObject(evaluator(op).get(driverContext()).eval(row(List.of(lhs.value(), rhs.value()))), 0); + try (Block.Ref ref = evaluator(op).get(driverContext()).eval(row(List.of(lhs.value(), rhs.value())))) { + result = toJavaObject(ref.block(), 0); + } } else { op = build(src, lhs, rhs); result = op.fold(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index ad2cabb35cc8e..cbb4cbcd4a73e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.esql.analysis.VerificationException; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -179,7 +180,9 @@ public void testEdgeCases() { private Object process(Object val) { if (testCase.allTypesAreRepresentable()) { Neg neg = new Neg(Source.EMPTY, 
field("val", typeOf(val))); - return toJavaObject(evaluator(neg).get(driverContext()).eval(row(List.of(val))), 0); + try (Block.Ref ref = evaluator(neg).get(driverContext()).eval(row(List.of(val)))) { + return toJavaObject(ref.block(), 0); + } } else { // just fold if type is not representable Neg neg = new Neg(Source.EMPTY, new Literal(Source.EMPTY, val, typeOf(val))); return neg.fold(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 0bb43539dba72..e20ba72b82e5c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -14,12 +14,14 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.RegexpQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.query.WildcardQueryBuilder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Analyzer; @@ -42,6 +44,7 @@ import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec.FieldSort; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; 
import org.elasticsearch.xpack.esql.plan.physical.EvalExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; @@ -54,6 +57,7 @@ import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; +import org.elasticsearch.xpack.esql.planner.FilterTests; import org.elasticsearch.xpack.esql.planner.Mapper; import org.elasticsearch.xpack.esql.planner.PhysicalVerificationException; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -91,6 +95,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForMissingField; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; +import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.FINAL; import static org.elasticsearch.xpack.ql.expression.Expressions.name; import static org.elasticsearch.xpack.ql.expression.Expressions.names; import static org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC; @@ -103,7 +108,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -//@TestLogging(value = "org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer:TRACE", reason = "debug") +@TestLogging(value = "org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer:TRACE", reason = "debug") public class PhysicalPlanOptimizerTests extends ESTestCase { private static final String PARAM_FORMATTING = "%1$s"; @@ -1844,7 +1849,7 @@ public void testAvgSurrogateFunctionAfterRenameAndLimit() { assertThat(limit.limit(), instanceOf(Literal.class)); assertThat(limit.limit().fold(), equalTo(10000)); var aggFinal = as(limit.child(), AggregateExec.class); - assertThat(aggFinal.getMode(), equalTo(AggregateExec.Mode.FINAL)); + assertThat(aggFinal.getMode(), equalTo(FINAL)); var aggPartial = as(aggFinal.child(), 
AggregateExec.class); assertThat(aggPartial.getMode(), equalTo(AggregateExec.Mode.PARTIAL)); limit = as(aggPartial.child(), LimitExec.class); @@ -1861,6 +1866,86 @@ public void testAvgSurrogateFunctionAfterRenameAndLimit() { assertThat(source.limit().fold(), equalTo(10)); } + // optimized doesn't know yet how to push down count over field + public void testCountOneFieldWithFilter() { + var plan = optimizedPlan(physicalPlan(""" + from test + | where salary > 1000 + | stats c = count(salary) + """)); + assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true)); + } + + // optimized doesn't know yet how to push down count over field + public void testCountOneFieldWithFilterAndLimit() { + var plan = optimizedPlan(physicalPlan(""" + from test + | where salary > 1000 + | limit 10 + | stats c = count(salary) + """)); + assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true)); + } + + // optimized doesn't know yet how to break down different multi count + public void testCountMultipleFieldsWithFilter() { + var plan = optimizedPlan(physicalPlan(""" + from test + | where salary > 1000 and emp_no > 10010 + | stats cs = count(salary), ce = count(emp_no) + """)); + assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true)); + } + + public void testCountAllWithFilter() { + var plan = optimizedPlan(physicalPlan(""" + from test + | where emp_no > 10010 + | stats c = count() + """)); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat(agg.getMode(), is(FINAL)); + assertThat(Expressions.names(agg.aggregates()), contains("c")); + var exchange = as(agg.child(), ExchangeExec.class); + var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class); + assertThat(esStatsQuery.limit(), is(nullValue())); + assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen")); + var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no"); + 
assertThat(expected.toString(), is(esStatsQuery.query().toString())); + } + + @AwaitsFix(bugUrl = "intermediateAgg does proper reduction but the agg itself does not - the optimizer needs to improve") + public void testMultiCountAllWithFilter() { + var plan = optimizedPlan(physicalPlan(""" + from test + | where emp_no > 10010 + | stats c = count(), call = count(*), c_literal = count(1) + """)); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat(agg.getMode(), is(FINAL)); + assertThat(Expressions.names(agg.aggregates()), contains("c", "call", "c_literal")); + var exchange = as(agg.child(), ExchangeExec.class); + var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class); + assertThat(esStatsQuery.limit(), is(nullValue())); + assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen")); + var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no"); + assertThat(expected.toString(), is(esStatsQuery.query().toString())); + } + + // optimized doesn't know yet how to break down different multi count + public void testCountFieldsAndAllWithFilter() { + var plan = optimizedPlan(physicalPlan(""" + from test + | where emp_no > 10010 + | stats c = count(), cs = count(salary), ce = count(emp_no) + """)); + assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true)); + } + private static EsQueryExec source(PhysicalPlan plan) { if (plan instanceof ExchangeExec exchange) { plan = exchange.child(); @@ -1915,4 +2000,8 @@ private QueryBuilder sv(QueryBuilder builder, String fieldName) { assertThat(sv.field(), equalTo(fieldName)); return sv.next(); } + + private QueryBuilder wrapWithSingleQuery(QueryBuilder inner, String fieldName) { + return FilterTests.singleValueQuery(inner, fieldName); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java index f0d4f0534caee..904e2c2ce7e8f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java @@ -159,7 +159,7 @@ private static FieldAttribute field(String name, DataType type) { static DriverContext driverContext() { return new DriverContext( new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - BlockFactory.getGlobalInstance() + BlockFactory.getNonBreakingInstance() ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 95ef6e7baf70c..f66aa9f47cb8d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -119,7 +119,7 @@ private LocalExecutionPlanner planner() throws IOException { "test", null, BigArrays.NON_RECYCLING_INSTANCE, - BlockFactory.getGlobalInstance(), + BlockFactory.getNonBreakingInstance(), config(), null, null, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index 937488d2ed546..640dd410d8573 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -19,6 +19,8 @@ import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Grok; import 
org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; +import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; +import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -97,6 +99,10 @@ protected Object pluggableMakeArg(Class> toBuildClass, Class> getActions() { return List.of( - new ActionHandler<>(GetGlobalCheckpointsAction.INSTANCE, GetGlobalCheckpointsAction.TransportAction.class), + new ActionHandler<>(GetGlobalCheckpointsAction.INSTANCE, GetGlobalCheckpointsAction.LocalAction.class), new ActionHandler<>(GetGlobalCheckpointsShardAction.INSTANCE, GetGlobalCheckpointsShardAction.TransportAction.class), new ActionHandler<>(GetSecretAction.INSTANCE, TransportGetSecretAction.class), new ActionHandler<>(PostSecretAction.INSTANCE, TransportPostSecretAction.class), diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java index b7856daa8d842..1e3794a4cefe4 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; @@ -26,8 +27,8 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.core.TimeValue; @@ -54,7 +55,7 @@ public class GetGlobalCheckpointsAction extends ActionType { + public static class LocalAction extends TransportAction { private final ClusterService clusterService; private final NodeClient client; @@ -161,7 +163,7 @@ public static class TransportAction extends org.elasticsearch.action.support.Tra private final ThreadPool threadPool; @Inject - public TransportAction( + public LocalAction( final ActionFilters actionFilters, final TransportService transportService, final ClusterService clusterService, diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java index 30f24deac408b..f366a18c7393f 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.index.engine.frozen; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.search.ClosePointInTimeAction; import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.OpenPointInTimeAction; @@ -73,7 +73,7 @@ public void testTimestampRangeRecalculatedOnStalePrimaryAllocation() throws IOEx 
createIndex("index", 1, 1); - final IndexResponse indexResponse = client().prepareIndex("index") + final DocWriteResponse indexResponse = client().prepareIndex("index") .setSource(DataStream.TIMESTAMP_FIELD_NAME, "2010-01-06T02:03:04.567Z") .get(); diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java index 3a5f2c3027b08..baa2678985a33 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java @@ -7,11 +7,11 @@ package org.elasticsearch.index.engine.frozen; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.ClosePointInTimeAction; import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.OpenPointInTimeAction; @@ -549,7 +549,7 @@ public void testRecoveryState() { final long nbDocs = randomIntBetween(0, 50); for (long i = 0; i < nbDocs; i++) { - final IndexResponse indexResponse = client().prepareIndex(indexName).setId(Long.toString(i)).setSource("field", i).get(); + final DocWriteResponse indexResponse = client().prepareIndex(indexName).setId(Long.toString(i)).setSource("field", i).get(); assertThat(indexResponse.status(), is(RestStatus.CREATED)); } @@ -580,7 +580,7 @@ public void testTranslogStats() { final int nbDocs = randomIntBetween(0, 50); 
int uncommittedOps = 0; for (long i = 0; i < nbDocs; i++) { - final IndexResponse indexResponse = client().prepareIndex(indexName).setId(Long.toString(i)).setSource("field", i).get(); + final DocWriteResponse indexResponse = client().prepareIndex(indexName).setId(Long.toString(i)).setSource("field", i).get(); assertThat(indexResponse.status(), is(RestStatus.CREATED)); if (rarely()) { indicesAdmin().prepareFlush(indexName).get(); diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java index b4826e389393c..ec3973a4b5781 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java @@ -124,7 +124,7 @@ public static final class DocumentSupplier { public DocumentSupplier(DocumentVersion version, Supplier document) { this.version = version; - this.document = new CachedSupplier<>(document); + this.document = CachedSupplier.wrap(document); } public SamlServiceProviderDocument getDocument() { diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java index 4086a1a729c14..de355cd675089 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.datastreams.CreateDataStreamAction; import 
org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.ManagedBy; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; @@ -56,10 +57,13 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Optional; +import java.util.function.Function; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -798,6 +802,129 @@ public void testUpdateIndexTemplateToMigrateFromDataStreamLifecycleToIlm() throw }); } + public void testGetDataStreamResponse() throws Exception { + // ILM rolls over every 2 documents + RolloverAction rolloverIlmAction = new RolloverAction(RolloverConditions.newBuilder().addMaxIndexDocsCondition(2L).build()); + Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of(rolloverIlmAction.getWriteableName(), rolloverIlmAction)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of("hot", hotPhase)); + PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); + assertAcked(client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get()); + + putComposableIndexTemplate( + indexTemplateName, + null, + List.of(dataStreamName + "*"), + Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policy).build(), + null, + null + ); + CreateDataStreamAction.Request createDataStreamRequest = new 
CreateDataStreamAction.Request(dataStreamName); + client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest).get(); + + indexDocs(dataStreamName, 2); + + // wait to rollover + assertBusy(() -> { + GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) + .actionGet(); + assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); + assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getIndices().size(), is(2)); + }); + + // prefer_ilm false in the index template + putComposableIndexTemplate( + indexTemplateName, + null, + List.of(dataStreamName + "*"), + Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policy).put(IndexSettings.PREFER_ILM, false).build(), + null, + null + ); + + client().execute( + PutDataStreamLifecycleAction.INSTANCE, + new PutDataStreamLifecycleAction.Request(new String[] { dataStreamName }, TimeValue.timeValueDays(90)) + ).actionGet(); + + // rollover again - at this point this data stream should have 2 backing indices managed by ILM and the write index managed by + // data stream lifecycle + indexDocs(dataStreamName, 2); + + assertBusy(() -> { + GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) + .actionGet(); + assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); + GetDataStreamAction.Response.DataStreamInfo dataStreamInfo = getDataStreamResponse.getDataStreams().get(0); + List indices = dataStreamInfo.getDataStream().getIndices(); + assertThat(indices.size(), is(3)); + + // the prefer_ilm value from the template should be reflected in the response at the top level + 
assertThat(dataStreamInfo.templatePreferIlmValue(), is(false)); + // the template ILM policy should still be reflected at the top level + assertThat(dataStreamInfo.getIlmPolicy(), is(policy)); + + List backingIndices = getBackingIndices(dataStreamName); + String firstGenerationIndex = backingIndices.get(0); + String secondGenerationIndex = backingIndices.get(1); + String writeIndex = backingIndices.get(2); + assertThat( + indices.stream().map(i -> i.getName()).toList(), + containsInAnyOrder(firstGenerationIndex, secondGenerationIndex, writeIndex) + ); + + Function> backingIndexSupplier = indexName -> indices.stream() + .filter(index -> index.getName().equals(indexName)) + .findFirst(); + + // let's assert the policy is reported for all indices (as it's present in the index template) and the value of the + // prefer_ilm setting remains true for the first 2 generations and is false for the write index (the generation after rollover) + Optional firstGenSettings = backingIndexSupplier.apply(firstGenerationIndex); + assertThat(firstGenSettings.isPresent(), is(true)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(firstGenSettings.get()).preferIlm(), is(true)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(firstGenSettings.get()).ilmPolicyName(), is(policy)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(firstGenSettings.get()).managedBy(), is(ManagedBy.ILM)); + Optional secondGenSettings = backingIndexSupplier.apply(secondGenerationIndex); + assertThat(secondGenSettings.isPresent(), is(true)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(secondGenSettings.get()).preferIlm(), is(true)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(secondGenSettings.get()).ilmPolicyName(), is(policy)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(secondGenSettings.get()).managedBy(), is(ManagedBy.ILM)); + Optional writeIndexSettings = backingIndexSupplier.apply(writeIndex); + assertThat(writeIndexSettings.isPresent(), 
is(true)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(writeIndexSettings.get()).preferIlm(), is(false)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(writeIndexSettings.get()).ilmPolicyName(), is(policy)); + assertThat(dataStreamInfo.getIndexSettingsValues().get(writeIndexSettings.get()).managedBy(), is(ManagedBy.LIFECYCLE)); + + // with the current configuration, the next generation index will be managed by DSL + assertThat(dataStreamInfo.getNextGenerationManagedBy(), is(ManagedBy.LIFECYCLE)); + }); + + // remove ILM policy and prefer_ilm from template + putComposableIndexTemplate(indexTemplateName, null, List.of(dataStreamName + "*"), Settings.builder().build(), null, null); + GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) + .actionGet(); + assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); + GetDataStreamAction.Response.DataStreamInfo dataStreamInfo = getDataStreamResponse.getDataStreams().get(0); + // since the ILM related settings are gone from the index template, this data stream should now be managed by lifecycle + assertThat(dataStreamInfo.getNextGenerationManagedBy(), is(ManagedBy.LIFECYCLE)); + + // disable data stream lifecycle on the data stream.
the future generations will be UNMANAGED + client().execute( + PutDataStreamLifecycleAction.INSTANCE, + new PutDataStreamLifecycleAction.Request(new String[] { dataStreamName }, TimeValue.timeValueDays(90), false) + ).actionGet(); + + getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest).actionGet(); + assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); + dataStreamInfo = getDataStreamResponse.getDataStreams().get(0); + // since the ILM related settings are gone from the index template and the lifecycle is disabled, this data stream should now be + // unmanaged + assertThat(dataStreamInfo.getNextGenerationManagedBy(), is(ManagedBy.UNMANAGED)); + } + static void indexDocs(String dataStream, int numDocs) { BulkRequest bulkRequest = new BulkRequest(); for (int i = 0; i < numDocs; i++) { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java index 411c29255fd78..4a70522f41848 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java @@ -7,9 +7,12 @@ package org.elasticsearch.xpack.inference.integration; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.inference.Model; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -20,20 +23,34 @@ import org.elasticsearch.xpack.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.PutInferenceModelAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.junit.Before; import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; import java.util.function.Function; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; +import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; public class MockInferenceServiceIT extends ESIntegTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private ModelRegistry modelRegistry; + + @Before + public void createComponents() { + modelRegistry = new ModelRegistry(client()); + } + @Override protected Collection> nodePlugins() { return List.of(InferencePlugin.class, TestInferenceServicePlugin.class); @@ -54,15 +71,28 @@ protected Function getClientWrapper() { public void testMockService() { String modelId = "test-mock"; - Model putModel = putMockService(modelId, TaskType.SPARSE_EMBEDDING); - Model readModel = getModel(modelId, TaskType.SPARSE_EMBEDDING); + ModelConfigurations putModel = putMockService(modelId, TaskType.SPARSE_EMBEDDING); + ModelConfigurations readModel = getModel(modelId, TaskType.SPARSE_EMBEDDING); assertModelsAreEqual(putModel, readModel); // The response is randomly generated, the input can be anything inferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, 
randomAlphaOfLength(10)); } - private Model putMockService(String modelId, TaskType taskType) { + public void testGetUnparsedModelMap_ForTestServiceModel_ReturnsSecretsPopulated() { + String modelId = "test-unparsed"; + putMockService(modelId, TaskType.SPARSE_EMBEDDING); + + var listener = new PlainActionFuture(); + modelRegistry.getUnparsedModelMap(modelId, listener); + + var modelConfig = listener.actionGet(TIMEOUT); + var secretsMap = removeFromMapOrThrowIfNull(modelConfig.secrets(), ModelSecrets.SECRET_SETTINGS); + var secrets = TestInferenceServicePlugin.TestSecretSettings.fromMap(secretsMap); + assertThat(secrets.apiKey(), is("abc64")); + } + + private ModelConfigurations putMockService(String modelId, TaskType taskType) { String body = """ { "service": "test_service", @@ -88,7 +118,6 @@ private Model putMockService(String modelId, TaskType taskType) { assertThat(response.getModel().getServiceSettings(), instanceOf(TestInferenceServicePlugin.TestServiceSettings.class)); var serviceSettings = (TestInferenceServicePlugin.TestServiceSettings) response.getModel().getServiceSettings(); assertEquals("my_model", serviceSettings.model()); - assertEquals("abc64", serviceSettings.apiKey()); assertThat(response.getModel().getTaskSettings(), instanceOf(TestInferenceServicePlugin.TestTaskSettings.class)); var taskSettings = (TestInferenceServicePlugin.TestTaskSettings) response.getModel().getTaskSettings(); @@ -97,7 +126,7 @@ private Model putMockService(String modelId, TaskType taskType) { return response.getModel(); } - public Model getModel(String modelId, TaskType taskType) { + public ModelConfigurations getModel(String modelId, TaskType taskType) { var response = client().execute(GetInferenceModelAction.INSTANCE, new GetInferenceModelAction.Request(modelId, taskType.toString())) .actionGet(); return response.getModel(); @@ -115,7 +144,7 @@ private void inferOnMockService(String modelId, TaskType taskType, String input) } } - private void assertModelsAreEqual(Model 
model1, Model model2) { + private void assertModelsAreEqual(ModelConfigurations model1, ModelConfigurations model2) { // The test can't rely on Model::equals as the specific subclass // may be different. Model loses information about it's implemented // subtype when it is streamed across the wire. diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index a400f84e3c2ec..9f079afaa24e5 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; @@ -99,13 +100,14 @@ public void testGetModel() throws Exception { assertThat(exceptionHolder.get(), is(nullValue())); assertThat(modelHolder.get(), not(nullValue())); - UnparsedModel unparsedModel = UnparsedModel.unparsedModelFromMap(modelHolder.get().config()); - assertEquals(model.getService(), unparsedModel.service()); + UnparsedModel unparsedModel = UnparsedModel.unparsedModelFromMap(modelHolder.get().config(), modelHolder.get().secrets()); + assertEquals(model.getConfigurations().getService(), unparsedModel.service()); ElserMlNodeModel roundTripModel = ElserMlNodeService.parseConfig( false, unparsedModel.modelId(), unparsedModel.taskType(), - unparsedModel.settings() + unparsedModel.settings(), + unparsedModel.secrets() ); assertEquals(model, roundTripModel); } @@ 
-179,17 +181,19 @@ protected void blockingCall(Consumer> function, AtomicRefe latch.await(); } - private static ModelWithUnknownField buildModelWithUnknownField(String modelId) { - return new ModelWithUnknownField( - modelId, - TaskType.SPARSE_EMBEDDING, - ElserMlNodeService.NAME, - ElserMlNodeServiceSettingsTests.createRandom(), - ElserMlNodeTaskSettingsTests.createRandom() + private static Model buildModelWithUnknownField(String modelId) { + return new Model( + new ModelWithUnknownField( + modelId, + TaskType.SPARSE_EMBEDDING, + ElserMlNodeService.NAME, + ElserMlNodeServiceSettingsTests.createRandom(), + ElserMlNodeTaskSettingsTests.createRandom() + ) ); } - private static class ModelWithUnknownField extends Model { + private static class ModelWithUnknownField extends ModelConfigurations { ModelWithUnknownField( String modelId, diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java index b9b0d68054ef5..61837336f291b 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java @@ -17,6 +17,9 @@ import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; @@ -45,40 +48,25 @@ public List getInferenceServiceFactories() { public List 
getInferenceServiceNamedWriteables() { return List.of( new NamedWriteableRegistry.Entry(ServiceSettings.class, TestServiceSettings.NAME, TestServiceSettings::new), - new NamedWriteableRegistry.Entry(TaskSettings.class, TestTaskSettings.NAME, TestTaskSettings::new) + new NamedWriteableRegistry.Entry(TaskSettings.class, TestTaskSettings.NAME, TestTaskSettings::new), + new NamedWriteableRegistry.Entry(SecretSettings.class, TestSecretSettings.NAME, TestSecretSettings::new) ); } - public class TestInferenceService implements InferenceService { + public static class TestInferenceService implements InferenceService { private static final String NAME = "test_service"; - public static TestServiceModel parseConfig( - boolean throwOnUnknownFields, - String modelId, - TaskType taskType, - Map settings - ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(settings, Model.SERVICE_SETTINGS); - var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); - + private static Map getTaskSettingsMap(Map settings) { Map taskSettingsMap; // task settings are optional - if (settings.containsKey(Model.TASK_SETTINGS)) { - taskSettingsMap = removeFromMapOrThrowIfNull(settings, Model.TASK_SETTINGS); + if (settings.containsKey(ModelConfigurations.TASK_SETTINGS)) { + taskSettingsMap = removeFromMapOrThrowIfNull(settings, ModelConfigurations.TASK_SETTINGS); } else { taskSettingsMap = Map.of(); } - var taskSettings = TestTaskSettings.fromMap(taskSettingsMap); - - if (throwOnUnknownFields) { - throwIfNotEmptyMap(settings, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - throwIfNotEmptyMap(taskSettingsMap, NAME); - } - - return new TestServiceModel(modelId, taskType, NAME, serviceSettings, taskSettings); + return taskSettingsMap; } public TestInferenceService(InferenceServicePlugin.InferenceServiceFactoryContext context) { @@ -91,22 +79,47 @@ public String name() { } @Override - public TestServiceModel parseConfigStrict(String modelId, TaskType taskType, Map config) { - 
return parseConfig(true, modelId, taskType, config); + public TestServiceModel parseRequestConfig(String modelId, TaskType taskType, Map config) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); + var secretSettings = TestSecretSettings.fromMap(serviceSettingsMap); + + var taskSettingsMap = getTaskSettingsMap(config); + var taskSettings = TestTaskSettings.fromMap(taskSettingsMap); + + throwIfNotEmptyMap(config, NAME); + throwIfNotEmptyMap(serviceSettingsMap, NAME); + throwIfNotEmptyMap(taskSettingsMap, NAME); + + return new TestServiceModel(modelId, taskType, NAME, serviceSettings, taskSettings, secretSettings); } @Override - public TestServiceModel parseConfigLenient(String modelId, TaskType taskType, Map config) { - return parseConfig(false, modelId, taskType, config); + public TestServiceModel parsePersistedConfig( + String modelId, + TaskType taskType, + Map config, + Map secrets + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); + + var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); + var secretSettings = TestSecretSettings.fromMap(secretSettingsMap); + + var taskSettingsMap = getTaskSettingsMap(config); + var taskSettings = TestTaskSettings.fromMap(taskSettingsMap); + + return new TestServiceModel(modelId, taskType, NAME, serviceSettings, taskSettings, secretSettings); } @Override public void infer(Model model, String input, Map taskSettings, ActionListener listener) { - switch (model.getTaskType()) { + switch (model.getConfigurations().getTaskType()) { case SPARSE_EMBEDDING -> listener.onResponse(TextExpansionResultsTests.createRandomResults(1, 10)); default -> listener.onFailure( new ElasticsearchStatusException( - 
TaskType.unsupportedTaskTypeErrorMsg(model.getTaskType(), NAME), + TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME), RestStatus.BAD_REQUEST ) ); @@ -127,9 +140,10 @@ public TestServiceModel( TaskType taskType, String service, TestServiceSettings serviceSettings, - TestTaskSettings taskSettings + TestTaskSettings taskSettings, + TestSecretSettings secretSettings ) { - super(modelId, taskType, service, serviceSettings, taskSettings); + super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); } @Override @@ -141,9 +155,14 @@ public TestServiceSettings getServiceSettings() { public TestTaskSettings getTaskSettings() { return (TestTaskSettings) super.getTaskSettings(); } + + @Override + public TestSecretSettings getSecretSettings() { + return (TestSecretSettings) super.getSecretSettings(); + } } - public record TestServiceSettings(String model, String apiKey) implements ServiceSettings { + public record TestServiceSettings(String model) implements ServiceSettings { private static final String NAME = "test_service_settings"; @@ -151,31 +170,28 @@ public static TestServiceSettings fromMap(Map map) { ValidationException validationException = new ValidationException(); String model = MapParsingUtils.removeAsType(map, "model", String.class); - String apiKey = MapParsingUtils.removeAsType(map, "api_key", String.class); if (model == null) { - validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("model", Model.SERVICE_SETTINGS)); - } - if (apiKey == null) { - validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("api_key", Model.SERVICE_SETTINGS)); + validationException.addValidationError( + MapParsingUtils.missingSettingErrorMsg("model", ModelConfigurations.SERVICE_SETTINGS) + ); } if (validationException.validationErrors().isEmpty() == false) { throw validationException; } - return new TestServiceSettings(model, apiKey); + 
return new TestServiceSettings(model); } public TestServiceSettings(StreamInput in) throws IOException { - this(in.readString(), in.readString()); + this(in.readString()); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("model", model); - builder.field("api_key", apiKey); builder.endObject(); return builder; } @@ -193,7 +209,6 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(model); - out.writeString(apiKey); } } @@ -235,4 +250,52 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests } } + + public record TestSecretSettings(String apiKey) implements SecretSettings { + + private static final String NAME = "test_secret_settings"; + + public static TestSecretSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String apiKey = MapParsingUtils.removeAsType(map, "api_key", String.class); + + if (apiKey == null) { + validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("api_key", ModelSecrets.SECRET_SETTINGS)); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new TestSecretSettings(apiKey); + } + + public TestSecretSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(apiKey); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("api_key", apiKey); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() 
{ + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index ba0f1b142a799..f84b841066c01 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -130,6 +130,17 @@ public Collection getSystemIndexDescriptors(Settings sett .setSettings(InferenceIndex.settings()) .setVersionMetaKey("version") .setOrigin(ClientHelper.INFERENCE_ORIGIN) + .build(), + SystemIndexDescriptor.builder() + .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED) + .setIndexPattern(InferenceSecretsIndex.INDEX_PATTERN) + .setPrimaryIndex(InferenceSecretsIndex.INDEX_NAME) + .setDescription("Contains inference service secrets") + .setMappings(InferenceSecretsIndex.mappings()) + .setSettings(InferenceSecretsIndex.settings()) + .setVersionMetaKey("version") + .setOrigin(ClientHelper.INFERENCE_ORIGIN) + .setNetNew() .build() ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceSecretsIndex.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceSecretsIndex.java new file mode 100644 index 0000000000000..a29682620ed95 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceSecretsIndex.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.io.UncheckedIOException; + +import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; + +public class InferenceSecretsIndex { + + private InferenceSecretsIndex() {} + + public static final String INDEX_NAME = ".infer-secrets"; + public static final String INDEX_PATTERN = INDEX_NAME + "*"; + + // Increment this version number when the mappings change + private static final int INDEX_MAPPING_VERSION = 1; + + public static Settings settings() { + return Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") + .build(); + } + + /** + * Reject any unknown fields being added by setting dynamic mappings to + * {@code strict} for the top level object. A document that contains unknown + * fields in the document root will be rejected at index time. + * + * The {@code secrets} object + * has dynamic mappings set to {@code false} which means all fields will + * be accepted without throwing an error but those fields are not indexed. + + * The reason for mixing {@code strict} and {@code false} dynamic settings + * is that {@code secrets} is defined by + * the inference services and therefore is not known when creating the + * index. However, the top level settings are known in advance and can + * be strictly mapped. + + * If the top level strict mapping changes then no new documents should + * be indexed until the index mappings have been updated; this happens + * automatically once all nodes in the cluster are of a compatible version.
+ * + * @return The index mappings + */ + public static XContentBuilder mappings() { + try { + return jsonBuilder().startObject() + .startObject(SINGLE_MAPPING_NAME) + .startObject("_meta") + .field("version", Version.CURRENT) + .field(SystemIndexDescriptor.VERSION_META_KEY, INDEX_MAPPING_VERSION) + .endObject() + .field("dynamic", "strict") + .startObject("properties") + .startObject("secret_settings") + .field("dynamic", "false") + .startObject("properties") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + } catch (IOException e) { + throw new UncheckedIOException("Failed to build mappings for index " + INDEX_NAME, e); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java index b6dd41df174e5..03e0f4d8a4543 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java @@ -8,21 +8,21 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import java.util.Map; -public record UnparsedModel(String modelId, TaskType taskType, String service, Map settings) { +public record UnparsedModel(String modelId, TaskType taskType, String service, Map settings, Map secrets) { - public static UnparsedModel unparsedModelFromMap(Map sourceMap) { - String modelId = removeStringOrThrowIfNull(sourceMap, Model.MODEL_ID); - String service = removeStringOrThrowIfNull(sourceMap, Model.SERVICE); - String taskTypeStr = removeStringOrThrowIfNull(sourceMap, TaskType.NAME); + public static UnparsedModel unparsedModelFromMap(Map configMap, Map 
secretsMap) { + String modelId = removeStringOrThrowIfNull(configMap, ModelConfigurations.MODEL_ID); + String service = removeStringOrThrowIfNull(configMap, ModelConfigurations.SERVICE); + String taskTypeStr = removeStringOrThrowIfNull(configMap, TaskType.NAME); TaskType taskType = TaskType.fromString(taskTypeStr); - return new UnparsedModel(modelId, taskType, service, sourceMap); + return new UnparsedModel(modelId, taskType, service, configMap, secretsMap); } private static String removeStringOrThrowIfNull(Map sourceMap, String fieldName) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java index 6c59fc89fd152..45b9474cebcdc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -104,18 +104,18 @@ public int hashCode() { public static class Response extends ActionResponse implements ToXContentObject { - private final Model model; + private final ModelConfigurations model; - public Response(Model model) { + public Response(ModelConfigurations model) { this.model = model; } public Response(StreamInput in) throws IOException { super(in); - model = new Model(in); + model = new ModelConfigurations(in); } - public Model getModel() { + public ModelConfigurations getModel() { return model; 
} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index 1e208e83985cb..8a8ea81653644 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -45,7 +45,7 @@ protected void doExecute( ActionListener listener ) { modelRegistry.getUnparsedModelMap(request.getModelId(), ActionListener.wrap(modelConfigMap -> { - var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config()); + var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config(), modelConfigMap.secrets()); var service = serviceRegistry.getService(unparsedModel.service()); if (service.isEmpty()) { listener.onFailure( @@ -58,8 +58,9 @@ protected void doExecute( ); return; } - var model = service.get().parseConfigLenient(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings()); - listener.onResponse(new PutInferenceModelAction.Response(model)); + var model = service.get() + .parsePersistedConfig(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings(), unparsedModel.secrets()); + listener.onResponse(new PutInferenceModelAction.Response(model.getConfigurations())); }, listener::onFailure)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index aab8ed98f4241..386243e43a1a0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -42,7 +42,7 @@ public TransportInferenceAction( protected void doExecute(Task task, InferenceAction.Request request, ActionListener listener) { ActionListener getModelListener = ActionListener.wrap(modelConfigMap -> { - var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config()); + var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config(), modelConfigMap.secrets()); var service = serviceRegistry.getService(unparsedModel.service()); if (service.isEmpty()) { listener.onFailure( @@ -68,7 +68,8 @@ protected void doExecute(Task task, InferenceAction.Request request, ActionListe return; } - var model = service.get().parseConfigLenient(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings()); + var model = service.get() + .parsePersistedConfig(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings(), unparsedModel.secrets()); inferOnService(model, request, service.get(), listener); }, listener::onFailure); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 8ab09bafbd248..b0995e5405b2f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import 
org.elasticsearch.threadpool.ThreadPool; @@ -74,7 +75,7 @@ protected void masterOperation( ) throws Exception { var requestAsMap = requestToMap(request); - String serviceName = (String) requestAsMap.remove(Model.SERVICE); + String serviceName = (String) requestAsMap.remove(ModelConfigurations.SERVICE); if (serviceName == null) { listener.onFailure(new ElasticsearchStatusException("Model configuration is missing a service", RestStatus.BAD_REQUEST)); return; @@ -86,7 +87,7 @@ protected void masterOperation( return; } - var model = service.get().parseConfigStrict(request.getModelId(), request.getTaskType(), requestAsMap); + var model = service.get().parseRequestConfig(request.getModelId(), request.getTaskType(), requestAsMap); // model is valid good to persist then start this.modelRegistry.storeModel( model, @@ -97,7 +98,10 @@ protected void masterOperation( private static void startModel(InferenceService service, Model model, ActionListener listener) { service.start( model, - ActionListener.wrap(ok -> listener.onResponse(new PutInferenceModelAction.Response(model)), listener::onFailure) + ActionListener.wrap( + ok -> listener.onResponse(new PutInferenceModelAction.Response(model.getConfigurations())), + listener::onFailure + ) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 4403ec53e7a13..aec87ed1765d1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.registry; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import 
org.elasticsearch.ExceptionsHelper; @@ -14,6 +16,8 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -27,19 +31,27 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.inference.Model; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.inference.InferenceIndex; +import org.elasticsearch.xpack.inference.InferenceSecretsIndex; import java.io.IOException; +import java.util.Arrays; import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.elasticsearch.core.Strings.format; public class ModelRegistry { - public record ModelConfigMap(Map config) {} + public record ModelConfigMap(Map config, Map secrets) {} + private static final Logger logger = LogManager.getLogger(ModelRegistry.class); private final OriginSettingClient client; public ModelRegistry(Client client) { @@ -48,46 +60,160 @@ public ModelRegistry(Client client) { public void getUnparsedModelMap(String modelId, ActionListener listener) { ActionListener searchListener = ActionListener.wrap(searchResponse -> { + // There should be a hit for the configurations and secrets if (searchResponse.getHits().getHits().length == 0) { listener.onFailure(new ResourceNotFoundException("Model not found [{}]", modelId)); return; } var hits = 
searchResponse.getHits().getHits(); - assert hits.length == 1; - listener.onResponse(new ModelConfigMap(hits[0].getSourceAsMap())); + listener.onResponse(createModelConfigMap(hits, modelId)); }, listener::onFailure); QueryBuilder queryBuilder = documentIdQuery(modelId); - SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN).setQuery(queryBuilder).setSize(1).request(); + SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN) + .setQuery(queryBuilder) + .setSize(2) + .request(); client.search(modelSearch, searchListener); } + private ModelConfigMap createModelConfigMap(SearchHit[] hits, String modelId) { + Map mappedHits = Arrays.stream(hits).collect(Collectors.toMap(hit -> { + if (hit.getIndex().startsWith(InferenceIndex.INDEX_NAME)) { + return InferenceIndex.INDEX_NAME; + } + + if (hit.getIndex().startsWith(InferenceSecretsIndex.INDEX_NAME)) { + return InferenceSecretsIndex.INDEX_NAME; + } + + logger.error(format("Found invalid index for model [%s] at index [%s]", modelId, hit.getIndex())); + throw new IllegalArgumentException( + format( + "Invalid result while loading model [%s] index: [%s]. Try deleting and reinitializing the service", + modelId, + hit.getIndex() + ) + ); + }, Function.identity())); + + if (mappedHits.containsKey(InferenceIndex.INDEX_NAME) == false + || mappedHits.containsKey(InferenceSecretsIndex.INDEX_NAME) == false + || mappedHits.size() > 2) { + logger.error(format("Failed to load model [%s], found model parts from index prefixes: [%s]", modelId, mappedHits.keySet())); + throw new IllegalStateException( + format("Failed to load model, model [%s] is in an invalid state. 
Try deleting and reinitializing the service", modelId) + ); + } + + return new ModelConfigMap( + mappedHits.get(InferenceIndex.INDEX_NAME).getSourceAsMap(), + mappedHits.get(InferenceSecretsIndex.INDEX_NAME).getSourceAsMap() + ); + } + public void storeModel(Model model, ActionListener listener) { - IndexRequest request = createIndexRequest(Model.documentId(model.getModelId()), InferenceIndex.INDEX_NAME, model, false); - request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + ActionListener bulkResponseActionListener = getStoreModelListener(model, listener); + + IndexRequest configRequest = createIndexRequest( + Model.documentId(model.getConfigurations().getModelId()), + InferenceIndex.INDEX_NAME, + model.getConfigurations(), + false + ); + + IndexRequest secretsRequest = createIndexRequest( + Model.documentId(model.getConfigurations().getModelId()), + InferenceSecretsIndex.INDEX_NAME, + model.getSecrets(), + false + ); + + client.prepareBulk() + .add(configRequest) + .add(secretsRequest) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .execute(bulkResponseActionListener); + } + + private static ActionListener getStoreModelListener(Model model, ActionListener listener) { + return ActionListener.wrap(bulkItemResponses -> { + var modelId = model.getConfigurations().getModelId(); + + if (bulkItemResponses.getItems().length == 0) { + logger.error(format("Storing model [%s] failed, no items were received from the bulk response", modelId)); - client.index(request, ActionListener.wrap(indexResponse -> listener.onResponse(true), e -> { - if (ExceptionsHelper.unwrapCause(e) instanceof VersionConflictEngineException) { - listener.onFailure(new ResourceAlreadyExistsException("Inference model [{}] already exists", model.getModelId())); - } else { listener.onFailure( new ElasticsearchStatusException( - "Failed to store inference model [{}]", - RestStatus.INTERNAL_SERVER_ERROR, - e, - model.getModelId() + format( + "Failed to store inference model [%s], 
invalid bulk response received. Try reinitializing the service", + modelId + ), + RestStatus.INTERNAL_SERVER_ERROR ) ); + return; } - })); + + BulkItemResponse.Failure failure = getFirstBulkFailure(bulkItemResponses); + + if (failure == null) { + listener.onResponse(true); + return; + } + + logBulkFailures(model.getConfigurations().getModelId(), bulkItemResponses); + + if (ExceptionsHelper.unwrapCause(failure.getCause()) instanceof VersionConflictEngineException) { + listener.onFailure(new ResourceAlreadyExistsException("Inference model [{}] already exists", modelId)); + return; + } + + listener.onFailure( + new ElasticsearchStatusException( + format("Failed to store inference model [%s]", modelId), + RestStatus.INTERNAL_SERVER_ERROR, + failure.getCause() + ) + ); + }, e -> { + String errorMessage = format("Failed to store inference model [%s]", model.getConfigurations().getModelId()); + logger.error(errorMessage, e); + listener.onFailure(new ElasticsearchStatusException(errorMessage, RestStatus.INTERNAL_SERVER_ERROR, e)); + }); + } + + private static void logBulkFailures(String modelId, BulkResponse bulkResponse) { + for (BulkItemResponse item : bulkResponse.getItems()) { + if (item.isFailed()) { + logger.error( + format( + "Failed to store inference model [%s] index: [%s] bulk failure message [%s]", + modelId, + item.getIndex(), + item.getFailureMessage() + ) + ); + } + } + } + + private static BulkItemResponse.Failure getFirstBulkFailure(BulkResponse bulkResponse) { + for (BulkItemResponse item : bulkResponse.getItems()) { + if (item.isFailed()) { + return item.getFailure(); + } + } + + return null; } public void deleteModel(String modelId, ActionListener listener) { DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false); - request.indices(InferenceIndex.INDEX_PATTERN); + request.indices(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN); request.setQuery(documentIdQuery(modelId)); request.setRefresh(true); @@ 
-106,7 +232,7 @@ private static IndexRequest createIndexRequest(String docId, String indexName, T return request.opType(operation).id(docId).source(source); } catch (IOException ex) { - throw new ElasticsearchException("Unexpected serialization exception for [" + docId + "]", ex); + throw new ElasticsearchException(format("Unexpected serialization exception for index [%s] doc [%s]", indexName, docId), ex); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java index 6c7e36e5d81ee..a317992bc7c40 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.elser; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; public class ElserMlNodeModel extends Model { @@ -19,7 +20,7 @@ public ElserMlNodeModel( ElserMlNodeServiceSettings serviceSettings, ElserMlNodeTaskSettings taskSettings ) { - super(modelId, taskType, service, serviceSettings, taskSettings); + super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings)); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java index 45acc467b047b..f8e8584a6a382 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java @@ 
-14,6 +14,7 @@ import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.plugins.InferenceServicePlugin; import org.elasticsearch.rest.RestStatus; @@ -40,15 +41,16 @@ public static ElserMlNodeModel parseConfig( boolean throwOnUnknownFields, String modelId, TaskType taskType, - Map settings + Map settings, + Map secrets ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(settings, Model.SERVICE_SETTINGS); + Map serviceSettingsMap = removeFromMapOrThrowIfNull(settings, ModelConfigurations.SERVICE_SETTINGS); var serviceSettings = serviceSettingsFromMap(serviceSettingsMap); Map taskSettingsMap; // task settings are optional - if (settings.containsKey(Model.TASK_SETTINGS)) { - taskSettingsMap = removeFromMapOrThrowIfNull(settings, Model.TASK_SETTINGS); + if (settings.containsKey(ModelConfigurations.TASK_SETTINGS)) { + taskSettingsMap = removeFromMapOrThrowIfNull(settings, ModelConfigurations.TASK_SETTINGS); } else { taskSettingsMap = Map.of(); } @@ -71,31 +73,40 @@ public ElserMlNodeService(InferenceServicePlugin.InferenceServiceFactoryContext } @Override - public ElserMlNodeModel parseConfigStrict(String modelId, TaskType taskType, Map config) { - return parseConfig(true, modelId, taskType, config); + public ElserMlNodeModel parseRequestConfig(String modelId, TaskType taskType, Map config) { + return parseConfig(true, modelId, taskType, config, config); } @Override - public ElserMlNodeModel parseConfigLenient(String modelId, TaskType taskType, Map config) { - return parseConfig(false, modelId, taskType, config); + public ElserMlNodeModel parsePersistedConfig( + String modelId, + TaskType taskType, + Map config, + Map secrets + ) { + return parseConfig(false, modelId, taskType, config, secrets); } @Override public void start(Model model, 
ActionListener listener) { if (model instanceof ElserMlNodeModel == false) { - listener.onFailure(new IllegalStateException("Error starting model, [" + model.getModelId() + "] is not an elser model")); + listener.onFailure( + new IllegalStateException("Error starting model, [" + model.getConfigurations().getModelId() + "] is not an elser model") + ); return; } - if (model.getTaskType() != TaskType.SPARSE_EMBEDDING) { - listener.onFailure(new IllegalStateException(TaskType.unsupportedTaskTypeErrorMsg(model.getTaskType(), NAME))); + if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) { + listener.onFailure( + new IllegalStateException(TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME)) + ); return; } var elserModel = (ElserMlNodeModel) model; var serviceSettings = elserModel.getServiceSettings(); - var startRequest = new StartTrainedModelDeploymentAction.Request(ELSER_V1_MODEL, model.getModelId()); + var startRequest = new StartTrainedModelDeploymentAction.Request(ELSER_V1_MODEL, model.getConfigurations().getModelId()); startRequest.setNumberOfAllocations(serviceSettings.getNumAllocations()); startRequest.setThreadsPerAllocation(serviceSettings.getNumThreads()); startRequest.setWaitForState(STARTED); @@ -111,15 +122,18 @@ public void start(Model model, ActionListener listener) { public void infer(Model model, String input, Map taskSettings, ActionListener listener) { // No task settings to override with requestTaskSettings - if (model.getTaskType() != TaskType.SPARSE_EMBEDDING) { + if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) { listener.onFailure( - new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(model.getTaskType(), NAME), RestStatus.BAD_REQUEST) + new ElasticsearchStatusException( + TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME), + RestStatus.BAD_REQUEST + ) ); return; } var request = 
InferTrainedModelDeploymentAction.Request.forTextInput( - model.getModelId(), + model.getConfigurations().getModelId(), TextExpansionConfigUpdate.EMPTY_UPDATE, List.of(input), TimeValue.timeValueSeconds(10) // TODO get timeout from request diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java index 1314e6eab4f25..42cb491c76204 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.MapParsingUtils; @@ -46,13 +46,17 @@ public static ElserMlNodeServiceSettings fromMap(Map map) { Integer numThreads = MapParsingUtils.removeAsType(map, NUM_THREADS, Integer.class); if (numAllocations == null) { - validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, Model.SERVICE_SETTINGS)); + validationException.addValidationError( + MapParsingUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS) + ); } else if (numAllocations < 1) { validationException.addValidationError(mustBeAPositiveNumberError(NUM_ALLOCATIONS, numAllocations)); } if (numThreads == null) { - validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(NUM_THREADS, Model.SERVICE_SETTINGS)); + 
validationException.addValidationError( + MapParsingUtils.missingSettingErrorMsg(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS) + ); } else if (numThreads < 1) { validationException.addValidationError(mustBeAPositiveNumberError(NUM_THREADS, numThreads)); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java index c1d84af5b5fbe..c494cab08d8ae 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java @@ -57,7 +57,7 @@ public boolean equals(Object obj) { @Override public int hashCode() { // TODO Class has no members all instances are equivalent - // Return the hash of NAME to make the serialization tests poss + // Return the hash of NAME to make the serialization tests pass return Objects.hash(NAME); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java similarity index 72% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java index 778f4703767a6..3adfcd29b0f7a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.inference.Model; +import 
org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; @@ -17,31 +17,37 @@ import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettingsTests; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; -public class ModelTests extends AbstractWireSerializingTestCase { +public class ModelConfigurationsTests extends AbstractWireSerializingTestCase { - public static Model createRandomInstance() { + public static ModelConfigurations createRandomInstance() { // TODO randomise task types and settings var taskType = TaskType.SPARSE_EMBEDDING; - return new Model(randomAlphaOfLength(6), taskType, randomAlphaOfLength(6), randomServiceSettings(), randomTaskSettings(taskType)); + return new ModelConfigurations( + randomAlphaOfLength(6), + taskType, + randomAlphaOfLength(6), + randomServiceSettings(), + randomTaskSettings(taskType) + ); } - public static Model mutateTestInstance(Model instance) { + public static ModelConfigurations mutateTestInstance(ModelConfigurations instance) { switch (randomIntBetween(0, 2)) { - case 0 -> new Model( + case 0 -> new ModelConfigurations( instance.getModelId() + "foo", instance.getTaskType(), instance.getService(), instance.getServiceSettings(), instance.getTaskSettings() ); - case 1 -> new Model( + case 1 -> new ModelConfigurations( instance.getModelId(), TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length], instance.getService(), instance.getServiceSettings(), instance.getTaskSettings() ); - case 2 -> new Model( + case 2 -> new ModelConfigurations( instance.getModelId(), instance.getTaskType(), instance.getService() + "bar", @@ -67,17 +73,17 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { } @Override - protected Writeable.Reader instanceReader() { - return Model::new; + protected Writeable.Reader 
instanceReader() { + return ModelConfigurations::new; } @Override - protected Model createTestInstance() { + protected ModelConfigurations createTestInstance() { return createRandomInstance(); } @Override - protected Model mutateInstance(Model instance) { + protected ModelConfigurations mutateInstance(ModelConfigurations instance) { return mutateTestInstance(instance); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java new file mode 100644 index 0000000000000..99d5aa0b2f1fa --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SecretSettings; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; + +public class ModelSecretsTests extends AbstractWireSerializingTestCase { + + public static ModelSecrets createRandomInstance() { + return new ModelSecrets(randomSecretSettings()); + } + + public static ModelSecrets mutateTestInstance(ModelSecrets instance) { + return createRandomInstance(); + } + + private static SecretSettings randomSecretSettings() { + return new FakeSecretSettings(randomAlphaOfLengthBetween(1, 10)); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry( + List.of(new NamedWriteableRegistry.Entry(SecretSettings.class, FakeSecretSettings.NAME, FakeSecretSettings::new)) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return ModelSecrets::new; + } + + @Override + protected ModelSecrets createTestInstance() { + return createRandomInstance(); + } + + @Override + protected ModelSecrets mutateInstance(ModelSecrets instance) { + return mutateTestInstance(instance); + } + + public record FakeSecretSettings(String apiKey) implements SecretSettings { + public static final String API_KEY = "api_key"; + public static final String NAME = "fake_secret_settings"; + + FakeSecretSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + 
out.writeString(apiKey); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(API_KEY, apiKey); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.INFERENCE_MODEL_SECRETS_ADDED; + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java index 1e8c05b7b05a8..0a2ad4699cca8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java @@ -11,18 +11,18 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; -import org.elasticsearch.xpack.inference.ModelTests; +import org.elasticsearch.xpack.inference.ModelConfigurationsTests; public class PutInferenceModelResponseTests extends AbstractWireSerializingTestCase { @Override protected PutInferenceModelAction.Response createTestInstance() { - return new PutInferenceModelAction.Response(ModelTests.createRandomInstance()); + return new PutInferenceModelAction.Response(ModelConfigurationsTests.createRandomInstance()); } @Override protected PutInferenceModelAction.Response mutateInstance(PutInferenceModelAction.Response instance) { - var mutatedModel = ModelTests.mutateTestInstance(instance.getModel()); + var mutatedModel = ModelConfigurationsTests.mutateTestInstance(instance.getModel()); return new PutInferenceModelAction.Response(mutatedModel); } diff 
--git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java new file mode 100644 index 0000000000000..43928da8ed3b3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java @@ -0,0 +1,205 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.model; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SecretSettings; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.MapParsingUtils; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; +import static org.elasticsearch.test.ESTestCase.randomInt; + +public class TestModel extends Model { + + public static TestModel createRandomInstance() { + return new TestModel( + randomAlphaOfLength(4), + TaskType.TEXT_EMBEDDING, + randomAlphaOfLength(10), + new TestModel.TestServiceSettings(randomAlphaOfLength(4)), + new TestModel.TestTaskSettings(randomInt(3)), + new TestModel.TestSecretSettings(randomAlphaOfLength(4)) + ); + } + + public TestModel( + String modelId, + TaskType 
taskType, + String service, + TestServiceSettings serviceSettings, + TestTaskSettings taskSettings, + TestSecretSettings secretSettings + ) { + super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); + } + + @Override + public TestServiceSettings getServiceSettings() { + return (TestServiceSettings) super.getServiceSettings(); + } + + @Override + public TestTaskSettings getTaskSettings() { + return (TestTaskSettings) super.getTaskSettings(); + } + + @Override + public TestSecretSettings getSecretSettings() { + return (TestSecretSettings) super.getSecretSettings(); + } + + public record TestServiceSettings(String model) implements ServiceSettings { + + private static final String NAME = "test_service_settings"; + + public static TestServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String model = MapParsingUtils.removeAsType(map, "model", String.class); + + if (model == null) { + validationException.addValidationError( + MapParsingUtils.missingSettingErrorMsg("model", ModelConfigurations.SERVICE_SETTINGS) + ); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new TestServiceSettings(model); + } + + public TestServiceSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("model", model); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(model); + } + } + + public record 
TestTaskSettings(Integer temperature) implements TaskSettings { + + private static final String NAME = "test_task_settings"; + + public static TestTaskSettings fromMap(Map map) { + Integer temperature = MapParsingUtils.removeAsType(map, "temperature", Integer.class); + return new TestTaskSettings(temperature); + } + + public TestTaskSettings(StreamInput in) throws IOException { + this(in.readOptionalVInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalVInt(temperature); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (temperature != null) { + builder.field("temperature", temperature); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } + } + + public record TestSecretSettings(String apiKey) implements SecretSettings { + + private static final String NAME = "test_secret_settings"; + + public static TestSecretSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String apiKey = MapParsingUtils.removeAsType(map, "api_key", String.class); + + if (apiKey == null) { + validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("api_key", ModelSecrets.SECRET_SETTINGS)); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new TestSecretSettings(apiKey); + } + + public TestSecretSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(apiKey); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
IOException { + builder.startObject(); + builder.field("api_key", apiKey); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java new file mode 100644 index 0000000000000..bad5c88067669 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -0,0 +1,272 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.registry; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.model.TestModel; +import org.junit.After; +import org.junit.Before; + +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ModelRegistryTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private ThreadPool threadPool; + + @Before + public void setUpThreadPool() { + threadPool = new TestThreadPool(getTestName()); + } + + @After + public void tearDownThreadPool() { + terminate(threadPool); + } + + public void testGetUnparsedModelMap_ThrowsResourceNotFound_WhenNoHitsReturned() { + var client = 
mockClient(); + mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[0])); + + var registry = new ModelRegistry(client); + + var listener = new PlainActionFuture(); + registry.getUnparsedModelMap("1", listener); + + ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(exception.getMessage(), is("Model not found [1]")); + } + + public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIndexReceived() { + var client = mockClient(); + var unknownIndexHit = SearchHit.createFromMap(Map.of("_index", "unknown_index")); + mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { unknownIndexHit })); + + var registry = new ModelRegistry(client); + + var listener = new PlainActionFuture(); + registry.getUnparsedModelMap("1", listener); + + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + exception.getMessage(), + is("Invalid result while loading model [1] index: [unknown_index]. Try deleting and reinitializing the service") + ); + } + + public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFindInferenceEntry() { + var client = mockClient(); + var inferenceSecretsHit = SearchHit.createFromMap(Map.of("_index", ".infer-secrets")); + mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceSecretsHit })); + + var registry = new ModelRegistry(client); + + var listener = new PlainActionFuture(); + registry.getUnparsedModelMap("1", listener); + + IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + exception.getMessage(), + is("Failed to load model, model [1] is in an invalid state. 
Try deleting and reinitializing the service") + ); + } + + public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFindInferenceSecretsEntry() { + var client = mockClient(); + var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); + + var registry = new ModelRegistry(client); + + var listener = new PlainActionFuture(); + registry.getUnparsedModelMap("1", listener); + + IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + exception.getMessage(), + is("Failed to load model, model [1] is in an invalid state. Try deleting and reinitializing the service") + ); + } + + public void testGetUnparsedModelMap_ReturnsModelConfigMap_WhenBothInferenceAndSecretsHitsAreFound() { + var client = mockClient(); + var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + var inferenceSecretsHit = SearchHit.createFromMap(Map.of("_index", ".infer-secrets")); + + mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit, inferenceSecretsHit })); + + var registry = new ModelRegistry(client); + + var listener = new PlainActionFuture(); + registry.getUnparsedModelMap("1", listener); + + var modelConfig = listener.actionGet(TIMEOUT); + assertThat(modelConfig.config(), nullValue()); + assertThat(modelConfig.secrets(), nullValue()); + } + + public void testStoreModel_ReturnsTrue_WhenNoFailuresOccur() { + var client = mockBulkClient(); + + var bulkItem = mock(BulkItemResponse.class); + when(bulkItem.isFailed()).thenReturn(false); + var bulkResponse = mock(BulkResponse.class); + when(bulkResponse.getItems()).thenReturn(new BulkItemResponse[] { bulkItem }); + + mockClientExecuteBulk(client, bulkResponse); + + var model = TestModel.createRandomInstance(); + var registry = new ModelRegistry(client); + var listener = new PlainActionFuture(); + + 
registry.storeModel(model, listener); + + assertTrue(listener.actionGet(TIMEOUT)); + } + + public void testStoreModel_ThrowsException_WhenBulkResponseIsEmpty() { + var client = mockBulkClient(); + + var bulkResponse = mock(BulkResponse.class); + when(bulkResponse.getItems()).thenReturn(new BulkItemResponse[0]); + + mockClientExecuteBulk(client, bulkResponse); + + var model = TestModel.createRandomInstance(); + var registry = new ModelRegistry(client); + var listener = new PlainActionFuture(); + + registry.storeModel(model, listener); + + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + exception.getMessage(), + is( + format( + "Failed to store inference model [%s], invalid bulk response received. Try reinitializing the service", + model.getConfigurations().getModelId() + ) + ) + ); + } + + public void testStoreModel_ThrowsResourceAlreadyExistsException_WhenFailureIsAVersionConflict() { + var client = mockBulkClient(); + + var bulkItem = mock(BulkItemResponse.class); + when(bulkItem.isFailed()).thenReturn(true); + + var failure = new BulkItemResponse.Failure("index", "id", mock(VersionConflictEngineException.class)); + when(bulkItem.getFailure()).thenReturn(failure); + var bulkResponse = mock(BulkResponse.class); + when(bulkResponse.getItems()).thenReturn(new BulkItemResponse[] { bulkItem }); + + mockClientExecuteBulk(client, bulkResponse); + + var model = TestModel.createRandomInstance(); + var registry = new ModelRegistry(client); + var listener = new PlainActionFuture(); + + registry.storeModel(model, listener); + + ResourceAlreadyExistsException exception = expectThrows(ResourceAlreadyExistsException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(exception.getMessage(), is(format("Inference model [%s] already exists", model.getConfigurations().getModelId()))); + } + + public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { + var client = 
mockBulkClient(); + + var bulkItem = mock(BulkItemResponse.class); + when(bulkItem.isFailed()).thenReturn(true); + + var failure = new BulkItemResponse.Failure("index", "id", mock(IllegalStateException.class)); + when(bulkItem.getFailure()).thenReturn(failure); + var bulkResponse = mock(BulkResponse.class); + when(bulkResponse.getItems()).thenReturn(new BulkItemResponse[] { bulkItem }); + + mockClientExecuteBulk(client, bulkResponse); + + var model = TestModel.createRandomInstance(); + var registry = new ModelRegistry(client); + var listener = new PlainActionFuture(); + + registry.storeModel(model, listener); + + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(exception.getMessage(), is(format("Failed to store inference model [%s]", model.getConfigurations().getModelId()))); + } + + private Client mockBulkClient() { + var client = mockClient(); + when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client, BulkAction.INSTANCE)); + + return client; + } + + private Client mockClient() { + var client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + + return client; + } + + private static void mockClientExecuteSearch(Client client, SearchResponse searchResponse) { + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[2]; + actionListener.onResponse(searchResponse); + return Void.TYPE; + }).when(client).execute(any(), any(), any()); + } + + private static void mockClientExecuteBulk(Client client, BulkResponse bulkResponse) { + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[2]; + actionListener.onResponse(bulkResponse); + return Void.TYPE; + }).when(client).execute(any(), any(), any()); + } + + private static SearchResponse mockSearchResponse(SearchHit[] 
hits) { + SearchHits searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 1); + + var searchResponse = mock(SearchResponse.class); + when(searchResponse.getHits()).thenReturn(searchHits); + + return searchResponse; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java index 0449c1b4a7d59..1ab580eec358b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java @@ -10,10 +10,12 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.client.internal.Client; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.plugins.InferenceServicePlugin; import org.elasticsearch.test.ESTestCase; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -40,12 +42,12 @@ public void testParseConfigStrict() { var settings = new HashMap(); settings.put( - Model.SERVICE_SETTINGS, + ModelConfigurations.SERVICE_SETTINGS, new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4)) ); - settings.put(Model.TASK_SETTINGS, Map.of()); + settings.put(ModelConfigurations.TASK_SETTINGS, Map.of()); - ElserMlNodeModel parsedModel = service.parseConfigStrict("foo", TaskType.SPARSE_EMBEDDING, settings); + ElserMlNodeModel parsedModel = service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings); assertEquals( new ElserMlNodeModel( @@ -64,11 +66,11 @@ public void testParseConfigStrictWithNoTaskSettings() { var settings = new HashMap(); settings.put( - 
Model.SERVICE_SETTINGS, + ModelConfigurations.SERVICE_SETTINGS, new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4)) ); - ElserMlNodeModel parsedModel = service.parseConfigStrict("foo", TaskType.SPARSE_EMBEDDING, settings); + ElserMlNodeModel parsedModel = service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings); assertEquals( new ElserMlNodeModel( @@ -88,52 +90,76 @@ public void testParseConfigStrictWithUnknownSettings() { { var settings = new HashMap(); settings.put( - Model.SERVICE_SETTINGS, + ModelConfigurations.SERVICE_SETTINGS, new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4)) ); - settings.put(Model.TASK_SETTINGS, Map.of()); + settings.put(ModelConfigurations.TASK_SETTINGS, Map.of()); settings.put("foo", "bar"); if (throwOnUnknown) { var e = expectThrows( ElasticsearchStatusException.class, - () -> ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings) + () -> ElserMlNodeService.parseConfig( + throwOnUnknown, + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ) ); assertThat( e.getMessage(), containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser_mlnode] service") ); } else { - var parsed = ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings); + var parsed = ElserMlNodeService.parseConfig( + throwOnUnknown, + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ); } } { var settings = new HashMap(); settings.put( - Model.SERVICE_SETTINGS, + ModelConfigurations.SERVICE_SETTINGS, new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4)) ); - settings.put(Model.TASK_SETTINGS, Map.of("foo", "bar")); + settings.put(ModelConfigurations.TASK_SETTINGS, Map.of("foo", "bar")); if (throwOnUnknown) { var e = expectThrows( 
ElasticsearchStatusException.class, - () -> ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings) + () -> ElserMlNodeService.parseConfig( + throwOnUnknown, + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ) ); assertThat( e.getMessage(), containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser_mlnode] service") ); } else { - var parsed = ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings); + var parsed = ElserMlNodeService.parseConfig( + throwOnUnknown, + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ); } } { var settings = new HashMap(); settings.put( - Model.SERVICE_SETTINGS, + ModelConfigurations.SERVICE_SETTINGS, new HashMap<>( Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4, "foo", "bar") ) @@ -142,14 +168,26 @@ public void testParseConfigStrictWithUnknownSettings() { if (throwOnUnknown) { var e = expectThrows( ElasticsearchStatusException.class, - () -> ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings) + () -> ElserMlNodeService.parseConfig( + throwOnUnknown, + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ) ); assertThat( e.getMessage(), containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser_mlnode] service") ); } else { - var parsed = ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings); + var parsed = ElserMlNodeService.parseConfig( + throwOnUnknown, + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ); } } } diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestGetPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestGetPipelineAction.java index d45d9173349f2..d1013999732c9 100644 --- 
a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestGetPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestGetPipelineAction.java @@ -17,9 +17,7 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.logstash.action.GetPipelineAction; import org.elasticsearch.xpack.logstash.action.GetPipelineRequest; -import org.elasticsearch.xpack.logstash.action.GetPipelineResponse; -import java.io.IOException; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -38,20 +36,17 @@ public List routes() { } @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { final List ids = List.of(request.paramAsStringArray("id", Strings.EMPTY_ARRAY)); return restChannel -> client.execute( GetPipelineAction.INSTANCE, new GetPipelineRequest(ids), - new RestToXContentListener<>(restChannel) { - @Override - protected RestStatus getStatus(GetPipelineResponse response) { - if (response.pipelines().isEmpty() && ids.isEmpty() == false) { - return RestStatus.NOT_FOUND; - } - return RestStatus.OK; + new RestToXContentListener<>(restChannel, r -> { + if (r.pipelines().isEmpty() && ids.isEmpty() == false) { + return RestStatus.NOT_FOUND; } - } + return RestStatus.OK; + }) ); } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index 2afeda1f13512..d13d910613d31 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ 
b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.ml.packageloader; -import org.elasticsearch.Version; +import org.elasticsearch.Build; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.bootstrap.BootstrapCheck; @@ -44,10 +44,11 @@ public class MachineLearningPackageLoader extends Plugin implements ActionPlugin // re-using thread pool setup by the ml plugin public static final String UTILITY_THREAD_POOL_NAME = "ml_utility"; + // This link will be invalid for serverless, but serverless will never be + // air-gapped, so this message should never be needed. private static final String MODEL_REPOSITORY_DOCUMENTATION_LINK = format( - "https://www.elastic.co/guide/en/machine-learning/%d.%d/ml-nlp-elser.html#air-gapped-install", - Version.CURRENT.major, - Version.CURRENT.minor + "https://www.elastic.co/guide/en/machine-learning/%s/ml-nlp-elser.html#air-gapped-install", + Build.current().version().replaceFirst("^(\\d+\\.\\d+).*", "$1") ); public MachineLearningPackageLoader() {} diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index de4857860f561..0b3dda1e365ed 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-test-artifact' @@ -78,15 +80,20 @@ dependencies { api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}" api "org.apache.lucene:lucene-analysis-kuromoji:${versions.lucene}" implementation 'org.ojalgo:ojalgo:51.2.0' - nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:deps@zip") { + nativeBundle("org.elasticsearch.ml:ml-cpp:${mlCppVersion()}:deps@zip") { changing = true } - 
nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:nodeps@zip") { + nativeBundle("org.elasticsearch.ml:ml-cpp:${mlCppVersion()}:nodeps@zip") { changing = true } testImplementation 'org.ini4j:ini4j:0.5.2' } +def mlCppVersion(){ + return (project.gradle.parent != null && BuildParams.isSnapshotBuild() == false) ? + (project.version + "-SNAPSHOT") : project.version; +} + artifacts { // normal es plugins do not publish the jar but we need to since users need it for extensions archives tasks.named("jar") diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java index 1b12eae165c29..027be542f00cb 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -241,7 +240,7 @@ private void persistModelSnapshotDoc(String jobId, String snapshotId, Date times modelSnapshotBuilder.build().toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); indexRequest.source(xContentBuilder); - IndexResponse indexResponse = client().execute(IndexAction.INSTANCE, indexRequest).actionGet(); + DocWriteResponse indexResponse = client().execute(IndexAction.INSTANCE, indexRequest).actionGet(); assertThat(indexResponse.getResult(), 
is(DocWriteResponse.Result.CREATED)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java index 6dd463835c2d7..9852517ff0231 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.MasterNodeRequest; @@ -127,7 +126,7 @@ private void persistModelSnapshotDoc(String jobId, String snapshotId, Date times modelSnapshotBuilder.build().toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); indexRequest.source(xContentBuilder); - IndexResponse indexResponse = client().execute(IndexAction.INSTANCE, indexRequest).actionGet(); + DocWriteResponse indexResponse = client().execute(IndexAction.INSTANCE, indexRequest).actionGet(); assertThat(indexResponse.getResult(), is(DocWriteResponse.Result.CREATED)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java index ceafc9c8f349b..4e82720532454 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java 
+++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHits; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -200,10 +199,7 @@ private void testRunJobInTwoPartsAndRevertSnapshotAndRunToCompletion(String jobI GetJobsStatsAction.Response.JobStats statsBeforeRevert = getJobStats(jobId).get(0); Instant timeBeforeRevert = Instant.now(); - assertThat( - revertModelSnapshot(job.getId(), revertSnapshot.getSnapshotId(), deleteInterveningResults).status(), - equalTo(RestStatus.OK) - ); + revertModelSnapshot(job.getId(), revertSnapshot.getSnapshotId(), deleteInterveningResults); GetJobsStatsAction.Response.JobStats statsAfterRevert = getJobStats(job.getId()).get(0); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java index 7ae7d4b0497e0..dbf489e8abf23 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java @@ -103,8 +103,16 @@ public class TextExpansionQueryIT extends PyTorchModelRestTestCase { RAW_MODEL_SIZE = Base64.getDecoder().decode(BASE_64_ENCODED_MODEL).length; } + public void testRankFeaturesTextExpansionQuery() throws IOException { + testTextExpansionQuery("rank_features"); + } + + public void testSparseVectorTextExpansionQuery() throws IOException { + 
testTextExpansionQuery("sparse_vector"); + } + @SuppressWarnings("unchecked") - public void testTextExpansionQuery() throws IOException { + private void testTextExpansionQuery(String tokensFieldType) throws IOException { String modelId = "text-expansion-test"; String indexName = modelId + "-index"; @@ -140,7 +148,7 @@ public void testTextExpansionQuery() throws IOException { } // index tokens - createRankFeaturesIndex(indexName); + createIndex(indexName, tokensFieldType); bulkIndexDocs(inputs, tokenWeights, indexName); // Test text expansion search against the indexed rank features @@ -157,7 +165,15 @@ public void testTextExpansionQuery() throws IOException { } } - public void testWithPipelineIngest() throws IOException { + public void testRankFeaturesWithPipelineIngest() throws IOException { + testWithPipelineIngest("rank_features"); + } + + public void testSparseVectorWithPipelineIngest() throws IOException { + testWithPipelineIngest("sparse_vector"); + } + + private void testWithPipelineIngest(String tokensFieldType) throws IOException { String modelId = "text-expansion-pipeline-test"; String indexName = modelId + "-index"; @@ -182,7 +198,7 @@ public void testWithPipelineIngest() throws IOException { ); // index tokens - createRankFeaturesIndex(indexName); + createIndex(indexName, tokensFieldType); var pipelineId = putPipeline(modelId); bulkIndexThroughPipeline(inputs, indexName, pipelineId); @@ -201,7 +217,15 @@ public void testWithPipelineIngest() throws IOException { } } - public void testWithDotsInTokenNames() throws IOException { + public void testRankFeaturesWithDotsInTokenNames() throws IOException { + testWithDotsInTokenNames("rank_features"); + } + + public void testSparseVectorWithDotsInTokenNames() throws IOException { + testWithDotsInTokenNames("sparse_vector"); + } + + private void testWithDotsInTokenNames(String tokensFieldType) throws IOException { String modelId = "text-expansion-dots-in-tokens"; String indexName = modelId + "-index"; @@ -214,7 
+238,7 @@ public void testWithDotsInTokenNames() throws IOException { List inputs = List.of("these are my words."); // index tokens - createRankFeaturesIndex(indexName); + createIndex(indexName, tokensFieldType); var pipelineId = putPipeline(modelId); bulkIndexThroughPipeline(inputs, indexName, pipelineId); @@ -278,18 +302,18 @@ protected void createTextExpansionModel(String modelId) throws IOException { client().performRequest(request); } - private void createRankFeaturesIndex(String indexName) throws IOException { + private void createIndex(String indexName, String tokensFieldType) throws IOException { Request createIndex = new Request("PUT", "/" + indexName); createIndex.setJsonEntity(""" - { - "mappings": { - "properties": { - "text_field": { - "type": "text" - }, - "ml.tokens": { - "type": "rank_features" - } + { + "mappings": { + "properties": { + "text_field": { + "type": "text" + }, + "ml.tokens": { + """ + "\"type\": \"" + tokensFieldType + "\"" + """ + } } } }"""); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java index d4a0802b0c770..5980e04acbf9c 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.ml.integration; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; @@ -13,7 +14,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import 
org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -309,7 +309,7 @@ public void testNotCreatedWhenAfterOtherMlIndexAndResetInProgress() throws Excep IndexRequest stateDoc = new IndexRequest(".ml-state"); stateDoc.source(Collections.singletonMap("state", "blah")); - IndexResponse indexResponse = client().index(stateDoc).actionGet(); + DocWriteResponse indexResponse = client().index(stateDoc).actionGet(); assertEquals(RestStatus.CREATED, indexResponse.status()); // Creating the .ml-state index would normally cause .ml-annotations diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java index 6d1155b1b8b64..1561520510c38 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.core.Tuple; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -77,7 +76,7 @@ public void createComponents() throws Exception { public void testCrud() throws InterruptedException { String datafeedId = "df1"; - AtomicReference> responseHolder = new AtomicReference<>(); + AtomicReference> responseHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new 
AtomicReference<>(); // Create datafeed config @@ -164,7 +163,7 @@ public void testGetDatafeedConfig_missing() throws InterruptedException { public void testMultipleCreateAndDeletes() throws InterruptedException { String datafeedId = "df2"; - AtomicReference> responseHolder = new AtomicReference<>(); + AtomicReference> responseHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); // Create datafeed config @@ -576,7 +575,7 @@ private Map createSecurityHeader() { private DatafeedConfig putDatafeedConfig(DatafeedConfig.Builder builder, Map headers) throws Exception { builder.setHeaders(headers); DatafeedConfig config = builder.build(); - this.>blockingCall( + this.>blockingCall( actionListener -> datafeedConfigProvider.putDatafeedConfig(config, headers, actionListener) ); return config; diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java index 2355904fb6910..bca437dbf676c 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -100,7 +99,7 @@ public void testCheckJobExists() throws InterruptedException { assertNull(exceptionHolder.get()); } - AtomicReference indexResponseHolder = new AtomicReference<>(); + AtomicReference indexResponseHolder = new 
AtomicReference<>(); // Create job Job job = createJob("existing-job", null).build(new Date()); @@ -120,7 +119,7 @@ public void testCheckJobExists() throws InterruptedException { public void testOverwriteNotAllowed() throws InterruptedException { final String jobId = "same-id"; - AtomicReference indexResponseHolder = new AtomicReference<>(); + AtomicReference indexResponseHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); // Create job @@ -141,7 +140,7 @@ public void testOverwriteNotAllowed() throws InterruptedException { public void testCrud() throws InterruptedException { final String jobId = "crud-job"; - AtomicReference indexResponseHolder = new AtomicReference<>(); + AtomicReference indexResponseHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); // Create job @@ -203,7 +202,7 @@ public void testCrud() throws InterruptedException { public void testUpdateWithAValidationError() throws Exception { final String jobId = "bad-update-job"; - AtomicReference indexResponseHolder = new AtomicReference<>(); + AtomicReference indexResponseHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); // Create job @@ -233,7 +232,7 @@ public void testUpdateWithValidator() throws Exception { // Create job Job newJob = createJob(jobId, null).build(new Date()); - this.blockingCall(actionListener -> jobConfigProvider.putJob(newJob, actionListener)); + this.blockingCall(actionListener -> jobConfigProvider.putJob(newJob, actionListener)); JobUpdate jobUpdate = new JobUpdate.Builder(jobId).setDescription("This job has been updated").build(); @@ -664,7 +663,7 @@ private static Job.Builder addCustomRule(Job.Builder job, DetectionRule rule) { private Job putJob(Job.Builder job) throws Exception { Job builtJob = job.build(new Date()); - this.blockingCall(actionListener -> jobConfigProvider.putJob(builtJob, actionListener)); + this.blockingCall(actionListener -> 
jobConfigProvider.putJob(builtJob, actionListener)); return builtJob; } } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java index c659fb20aeaeb..1656970f17158 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java @@ -6,12 +6,12 @@ */ package org.elasticsearch.xpack.ml.integration; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -320,7 +320,7 @@ public void testGetTruncatedModelDeprecatedDefinition() throws Exception { new ToXContent.MapParams(Collections.singletonMap(FOR_INTERNAL_STORAGE, "true")) ) ) { - AtomicReference putDocHolder = new AtomicReference<>(); + AtomicReference putDocHolder = new AtomicReference<>(); blockingCall( listener -> client().prepareIndex(InferenceIndexConstants.LATEST_INDEX_NAME) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index c8e4fd488394a..d2d6bd4fcb443 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -1227,7 +1227,8 @@ public Collection createComponents( threadPool, new NodeLoadDetector(memoryTracker), systemAuditor, - nodeAvailabilityZoneMapper + nodeAvailabilityZoneMapper, + client ) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TrainedModelValidator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TrainedModelValidator.java index d1d66299db67f..acd0a124d59c2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TrainedModelValidator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TrainedModelValidator.java @@ -59,7 +59,7 @@ static void validateMinimumVersion(ModelPackageConfig resolvedModelPackageConfig if (MlConfigVersion.getMinMlConfigVersion(state.nodes()).before(minimumVersion)) { throw new ActionRequestValidationException().addValidationError( format( - "The model [%s] requires that all nodes are at least version [%s]", + "The model [%s] requires that all nodes have ML config version [%s] or higher", resolvedModelPackageConfig.getPackagedModelId(), resolvedModelPackageConfig.getMinimumVersion() ) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java index 88350aa5cf3db..7b3fa5252306d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; 
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; @@ -64,9 +64,9 @@ protected void doExecute(Task task, PutCalendarAction.Request request, ActionLis indexRequest.opType(DocWriteRequest.OpType.CREATE); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, new ActionListener<>() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { listener.onResponse(new PutCalendarAction.Response(calendar)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index 851dcab1d6b5e..ea3404cd304c1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -9,9 +9,9 @@ import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; @@ -58,9 +58,9 @@ protected void doExecute(Task task, PutFilterAction.Request request, ActionListe throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e); } - 
executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, new ActionListener<>() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { listener.onResponse(new PutFilterAction.Response(filter)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 6f97689222196..a0a2a81791550 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -71,6 +72,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.ml.inference.deployment.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.utils.TaskRetriever; @@ -78,6 +80,7 @@ import java.time.Instant; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; @@ -132,7 +135,7 @@ protected void masterOperation( Task task, PutTrainedModelAction.Request 
request, ClusterState state, - ActionListener listener + ActionListener finalResponseListener ) { TrainedModelConfig config = request.getTrainedModelConfig(); try { @@ -140,7 +143,9 @@ protected void masterOperation( config.ensureParsedDefinition(xContentRegistry); } } catch (IOException ex) { - listener.onFailure(ExceptionsHelper.badRequestException("Failed to parse definition for [{}]", ex, config.getModelId())); + finalResponseListener.onFailure( + ExceptionsHelper.badRequestException("Failed to parse definition for [{}]", ex, config.getModelId()) + ); return; } @@ -150,7 +155,7 @@ protected void masterOperation( try { config.getModelDefinition().getTrainedModel().validate(); } catch (ElasticsearchException ex) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException("Definition for [{}] has validation failures.", ex, config.getModelId()) ); return; @@ -158,7 +163,7 @@ protected void masterOperation( TrainedModelType trainedModelType = TrainedModelType.typeFromTrainedModel(config.getModelDefinition().getTrainedModel()); if (trainedModelType == null) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "Unknown trained model definition class [{}]", config.getModelDefinition().getTrainedModel().getName() @@ -171,7 +176,7 @@ protected void masterOperation( // Set the model type from the definition config = new TrainedModelConfig.Builder(config).setModelType(trainedModelType).build(); } else if (trainedModelType != config.getModelType()) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "{} [{}] does not match the model definition type [{}]", TrainedModelConfig.MODEL_TYPE.getPreferredName(), @@ -183,7 +188,7 @@ protected void masterOperation( } if (config.getInferenceConfig().isTargetTypeSupported(config.getModelDefinition().getTrainedModel().targetType()) == false) { - listener.onFailure( + finalResponseListener.onFailure( 
ExceptionsHelper.badRequestException( "Model [{}] inference config type [{}] does not support definition target type [{}]", config.getModelId(), @@ -196,7 +201,7 @@ protected void masterOperation( TransportVersion minCompatibilityVersion = config.getModelDefinition().getTrainedModel().getMinimalCompatibilityVersion(); if (state.getMinTransportVersion().before(minCompatibilityVersion)) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "Cannot create model [{}] while cluster upgrade is in progress.", config.getModelId() @@ -223,7 +228,7 @@ protected void masterOperation( } if (ModelAliasMetadata.fromState(state).getModelId(trainedModelConfig.getModelId()) != null) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "requested model_id [{}] is the same as an existing model_alias. Model model_aliases and ids must be unique", config.getModelId() @@ -233,7 +238,7 @@ protected void masterOperation( } if (TrainedModelAssignmentMetadata.fromState(state).hasDeployment(trainedModelConfig.getModelId())) { - listener.onFailure( + finalResponseListener.onFailure( ExceptionsHelper.badRequestException( "Cannot create model [{}] the id is the same as an current model deployment", config.getModelId() @@ -242,6 +247,14 @@ protected void masterOperation( return; } + ActionListener finalResponseAction = ActionListener.wrap((configToReturn) -> { + finalResponseListener.onResponse(new PutTrainedModelAction.Response(configToReturn)); + }, finalResponseListener::onFailure); + + ActionListener verifyClusterAndModelArchitectures = ActionListener.wrap((configToReturn) -> { + verifyMlNodesAndModelArchitectures(configToReturn, client, threadPool, finalResponseAction); + }, finalResponseListener::onFailure); + ActionListener finishedStoringListener = ActionListener.wrap(bool -> { TrainedModelConfig configToReturn = trainedModelConfig.clearDefinition().build(); if (modelPackageConfigHolder.get() != null) { 
@@ -250,19 +263,19 @@ protected void masterOperation( modelPackageConfigHolder.get(), request.isWaitForCompletion(), ActionListener.wrap( - downloadTriggered -> listener.onResponse(new PutTrainedModelAction.Response(configToReturn)), - listener::onFailure + downloadTriggered -> verifyClusterAndModelArchitectures.onResponse(configToReturn), + finalResponseListener::onFailure ) ); } else { - listener.onResponse(new PutTrainedModelAction.Response(configToReturn)); + finalResponseListener.onResponse(new PutTrainedModelAction.Response(configToReturn)); } - }, listener::onFailure); + }, finalResponseListener::onFailure); var isPackageModel = config.isPackagedModel(); ActionListener checkStorageIndexSizeListener = ActionListener.wrap( r -> trainedModelProvider.storeTrainedModel(trainedModelConfig.build(), finishedStoringListener, isPackageModel), - listener::onFailure + finalResponseListener::onFailure ); ActionListener tagsModelIdCheckListener = ActionListener.wrap(r -> { @@ -276,7 +289,7 @@ protected void masterOperation( IndexStats indexStats = stats.getIndices().get(InferenceIndexConstants.nativeDefinitionStore()); if (indexStats != null && indexStats.getTotal().getStore().getSizeInBytes() > MAX_NATIVE_DEFINITION_INDEX_SIZE.getBytes()) { - listener.onFailure( + finalResponseListener.onFailure( new ElasticsearchStatusException( "Native model store has exceeded the maximum acceptable size of {}, " + "please delete older unused pytorch models", @@ -293,7 +306,7 @@ protected void masterOperation( checkStorageIndexSizeListener.onResponse(null); return; } - listener.onFailure( + finalResponseListener.onFailure( new ElasticsearchStatusException( "Unable to calculate stats for definition storage index [{}], please try again later", RestStatus.SERVICE_UNAVAILABLE, @@ -305,11 +318,11 @@ protected void masterOperation( return; } checkStorageIndexSizeListener.onResponse(null); - }, listener::onFailure); + }, finalResponseListener::onFailure); ActionListener 
modelIdTagCheckListener = ActionListener.wrap( r -> checkTagsAgainstModelIds(request.getTrainedModelConfig().getTags(), tagsModelIdCheckListener), - listener::onFailure + finalResponseListener::onFailure ); ActionListener handlePackageAndTagsListener = ActionListener.wrap(r -> { @@ -318,29 +331,61 @@ protected void masterOperation( try { TrainedModelValidator.validatePackage(trainedModelConfig, resolvedModelPackageConfig, state); } catch (ValidationException e) { - listener.onFailure(e); + finalResponseListener.onFailure(e); return; } modelPackageConfigHolder.set(resolvedModelPackageConfig); setTrainedModelConfigFieldsFromPackagedModel(trainedModelConfig, resolvedModelPackageConfig, xContentRegistry); checkModelIdAgainstTags(trainedModelConfig.getModelId(), modelIdTagCheckListener); - }, listener::onFailure)); + }, finalResponseListener::onFailure)); } else { checkModelIdAgainstTags(trainedModelConfig.getModelId(), modelIdTagCheckListener); } - }, listener::onFailure); + }, finalResponseListener::onFailure); checkForExistingTask( client, trainedModelConfig.getModelId(), request.isWaitForCompletion(), - listener, + finalResponseListener, handlePackageAndTagsListener, request.timeout() ); } + void verifyMlNodesAndModelArchitectures( + TrainedModelConfig configToReturn, + Client client, + ThreadPool threadPool, + ActionListener configToReturnListener + ) { + ActionListener addWarningHeaderOnFailureListener = new ActionListener() { + @Override + public void onResponse(TrainedModelConfig config) { + assert Objects.equals(config, configToReturn); + configToReturnListener.onResponse(configToReturn); + } + + @Override + public void onFailure(Exception e) { + HeaderWarning.addWarning(e.getMessage()); + configToReturnListener.onResponse(configToReturn); + } + }; + + callVerifyMlNodesAndModelArchitectures(configToReturn, addWarningHeaderOnFailureListener, client, threadPool); + } + + void callVerifyMlNodesAndModelArchitectures( + TrainedModelConfig configToReturn, + 
ActionListener failureListener, + Client client, + ThreadPool threadPool + ) { + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(failureListener, client, threadPool, configToReturn); + } + /** * This method is package private for testing */ diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 2a1844ea1fccf..ab215106c8ed0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -807,7 +807,7 @@ public static String nodeFilter(DiscoveryNode node, TaskParams params) { + id + "] on node [" + JobNodeSelector.nodeNameAndVersion(node) - + "], because the data frame analytics requires a node of version [" + + "], because the data frame analytics requires a node with ML config version [" + TaskParams.VERSION_INTRODUCED + "] or higher"; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java index 9b57f60e6b4ec..15de14d2de297 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java @@ -8,12 +8,12 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; 
-import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; @@ -128,9 +128,9 @@ private void indexUpdatedFilter( throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e); } - executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, new ActionListener<>() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { jobManager.notifyFilterChanged( filter, request.getAddItems(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index 2ed8685cadaea..316e5707a48dd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -21,7 +21,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -113,7 +112,7 @@ public DatafeedConfigProvider(Client client, NamedXContentRegistry xContentRegis public void putDatafeedConfig( DatafeedConfig config, Map headers, - ActionListener> listener + ActionListener> listener ) { DatafeedConfig finalConfig; @@ -353,7 +352,7 @@ public void onResponse(GetResponse getResponse) { }); } - 
private void indexUpdatedConfig(DatafeedConfig updatedConfig, long seqNo, long primaryTerm, ActionListener listener) { + private void indexUpdatedConfig(DatafeedConfig updatedConfig, long seqNo, long primaryTerm, ActionListener listener) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder updatedSource = updatedConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName()).id(DatafeedConfig.documentId(updatedConfig.getId())) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java index 34ace0d0c9273..c6af1bcfa6f18 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java @@ -10,9 +10,9 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -215,7 +215,7 @@ void persistProgress(Client clientToUse, String jobId, Runnable runnable) { String progressDocId = StoredProgress.documentId(jobId); // Step 4: Run the runnable provided as the argument - ActionListener indexProgressDocListener = ActionListener.wrap(indexResponse -> { + ActionListener indexProgressDocListener = ActionListener.wrap(indexResponse -> { LOGGER.debug("[{}] Successfully indexed progress document: {}", jobId, storedProgress.get().get()); runnable.run(); 
}, indexError -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index 2cfc330533109..f93988681f605 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -86,7 +86,7 @@ public class DataFrameDataExtractor { context.extractedFields.getAllFields().forEach(f -> this.extractedFieldsByName.put(f.getName(), f)); hasNext = true; hasPreviousSearchFailed = false; - this.trainTestSplitter = new CachedSupplier<>(context.trainTestSplitterFactory::create); + this.trainTestSplitter = CachedSupplier.wrap(context.trainTestSplitterFactory::create); } public Map getHeaders() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java index 4d7cb456d52d9..20791534b9801 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java @@ -10,12 +10,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.IndicesOptions; import 
org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.TimeValue; @@ -65,7 +65,7 @@ protected void doExecute(ActionListener listener) { listener::onFailure ); - ActionListener dataCountsIndexedListener = ActionListener.wrap( + ActionListener dataCountsIndexedListener = ActionListener.wrap( indexResponse -> refreshIndices(refreshListener), listener::onFailure ); @@ -73,7 +73,7 @@ protected void doExecute(ActionListener listener) { indexDataCounts(dataCountsIndexedListener); } - private void indexDataCounts(ActionListener listener) { + private void indexDataCounts(ActionListener listener) { DataCounts dataCounts = task.getStatsHolder().getDataCountsTracker().report(); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { dataCounts.toXContent( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java index efc8bd84c6350..ea52c4918d05b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java @@ -15,6 +15,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -47,6 +48,7 @@ import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.autoscaling.NodeAvailabilityZoneMapper; import org.elasticsearch.xpack.ml.inference.assignment.planning.AllocationReducer; +import 
org.elasticsearch.xpack.ml.inference.deployment.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.ml.job.NodeLoad; import org.elasticsearch.xpack.ml.job.NodeLoadDetector; import org.elasticsearch.xpack.ml.notifications.SystemAuditor; @@ -78,6 +80,7 @@ public class TrainedModelAssignmentClusterService implements ClusterStateListene private final NodeLoadDetector nodeLoadDetector; private final SystemAuditor systemAuditor; private final NodeAvailabilityZoneMapper nodeAvailabilityZoneMapper; + private final Client client; private volatile int maxMemoryPercentage; private volatile boolean useAuto; private volatile int maxOpenJobs; @@ -91,7 +94,8 @@ public TrainedModelAssignmentClusterService( ThreadPool threadPool, NodeLoadDetector nodeLoadDetector, SystemAuditor systemAuditor, - NodeAvailabilityZoneMapper nodeAvailabilityZoneMapper + NodeAvailabilityZoneMapper nodeAvailabilityZoneMapper, + Client client ) { this.clusterService = Objects.requireNonNull(clusterService); this.threadPool = Objects.requireNonNull(threadPool); @@ -104,6 +108,7 @@ public TrainedModelAssignmentClusterService( this.maxLazyMLNodes = MachineLearning.MAX_LAZY_ML_NODES.get(settings); this.maxMLNodeSize = MachineLearning.MAX_ML_NODE_SIZE.get(settings).getBytes(); this.allocatedProcessorsScale = MachineLearning.ALLOCATED_PROCESSORS_SCALE.get(settings); + this.client = client; // Only nodes that can possibly be master nodes really need this service running if (DiscoveryNode.isMasterNode(settings)) { clusterService.addListener(this); @@ -150,14 +155,14 @@ private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String @Override public void clusterChanged(ClusterChangedEvent event) { - if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + if (eventStateHasGlobalBlockStateNotRecoveredBlock(event)) { return; } if (event.localNodeMaster() == false) { return; } - if 
(event.state().getMinTransportVersion().before(DISTRIBUTED_MODEL_ALLOCATION_TRANSPORT_VERSION)) { + if (eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(event)) { // we should not try to rebalance assignments while there may be nodes running on a version // prior to introducing distributed model allocation. // But we should remove routing to removed or shutting down nodes. @@ -165,6 +170,10 @@ public void clusterChanged(ClusterChangedEvent event) { return; } + if (event.nodesAdded()) { + logMlNodeHeterogeneity(); + } + Optional rebalanceReason = detectReasonToRebalanceModels(event); if (rebalanceReason.isPresent()) { // As this produces a cluster state update task, we are certain that if the persistent @@ -187,6 +196,42 @@ public void clusterChanged(ClusterChangedEvent event) { } } + boolean eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(ClusterChangedEvent event) { + return event.state().getMinTransportVersion().before(DISTRIBUTED_MODEL_ALLOCATION_TRANSPORT_VERSION); + } + + boolean eventStateHasGlobalBlockStateNotRecoveredBlock(ClusterChangedEvent event) { + return event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK); + } + + void logMlNodeHeterogeneity() { + ActionListener> architecturesListener = getArchitecturesSetActionListener(); + MlPlatformArchitecturesUtil.getMlNodesArchitecturesSet(architecturesListener, client, threadPool); + } + + static ActionListener> getArchitecturesSetActionListener() { + ActionListener> architecturesListener = new ActionListener>() { + @Override + public void onResponse(Set architectures) { + if (architectures.size() > 1) { + logger.warn( + format( + "Heterogeneous platform architectures were detected among ML nodes. " + + "This will prevent the deployment of some trained models. 
Distinct platform architectures detected: %s", + architectures + ) + ); + } + } + + @Override + public void onFailure(Exception e) { + logger.error("Failed to detect heterogeneity among ML nodes with exception: ", e); + } + }; + return architecturesListener; + } + private void removeRoutingToRemovedOrShuttingDownNodes(ClusterChangedEvent event) { if (areAssignedNodesRemoved(event)) { submitUnbatchedTask("removing routing entries for removed or shutting down nodes", new ClusterStateUpdateTask() { @@ -486,51 +531,89 @@ private void rebalanceAssignments( String reason, ActionListener listener ) { - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { - logger.debug(() -> format("Rebalancing model allocations because [%s]", reason)); - TrainedModelAssignmentMetadata.Builder rebalancedMetadata; - try { - rebalancedMetadata = rebalanceAssignments(clusterState, modelToAdd); - } catch (Exception e) { - listener.onFailure(e); - return; - } + ActionListener> architecturesListener = ActionListener.wrap((mlNodesArchitectures) -> { + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { + logger.debug(() -> format("Rebalancing model allocations because [%s]", reason)); + + TrainedModelAssignmentMetadata.Builder rebalancedMetadata; + try { + rebalancedMetadata = rebalanceAssignments(clusterState, modelToAdd); + } catch (Exception e) { + listener.onFailure(e); + return; + } - submitUnbatchedTask(reason, new ClusterStateUpdateTask() { + submitUnbatchedTask(reason, new ClusterStateUpdateTask() { - private volatile boolean isUpdated; - private volatile boolean isChanged; + private volatile boolean isUpdated; + private volatile boolean isChanged; - @Override - public ClusterState execute(ClusterState currentState) { + @Override + public ClusterState execute(ClusterState currentState) { - if (areClusterStatesCompatibleForRebalance(clusterState, currentState)) { - isUpdated = true; - ClusterState updatedState = update(currentState, 
rebalancedMetadata); - isChanged = updatedState != currentState; - return updatedState; + currentState = stopPlatformSpecificModelsInHeterogeneousClusters( + currentState, + mlNodesArchitectures, + modelToAdd, + clusterState + ); + + if (areClusterStatesCompatibleForRebalance(clusterState, currentState)) { + isUpdated = true; + ClusterState updatedState = update(currentState, rebalancedMetadata); + isChanged = updatedState != currentState; + return updatedState; + } + + rebalanceAssignments(currentState, modelToAdd, reason, listener); + return currentState; } - rebalanceAssignments(currentState, modelToAdd, reason, listener); - return currentState; - } - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - if (isUpdated) { - if (isChanged) { - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute(() -> systemAuditor.info(Messages.getMessage(Messages.INFERENCE_DEPLOYMENT_REBALANCED, reason))); + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + if (isUpdated) { + if (isChanged) { + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute( + () -> systemAuditor.info(Messages.getMessage(Messages.INFERENCE_DEPLOYMENT_REBALANCED, reason)) + ); + } + listener.onResponse(TrainedModelAssignmentMetadata.fromState(newState)); } - listener.onResponse(TrainedModelAssignmentMetadata.fromState(newState)); } - } + }); }); - }); + }, listener::onFailure); + + MlPlatformArchitecturesUtil.getMlNodesArchitecturesSet(architecturesListener, client, threadPool); + } + + ClusterState stopPlatformSpecificModelsInHeterogeneousClusters( + ClusterState updatedState, + Set mlNodesArchitectures, + Optional modelToAdd, + ClusterState clusterState + ) { + if (mlNodesArchitectures.size() > 1 && modelToAdd.isPresent()) 
{ + String reasonToStop = format( + "ML nodes in this cluster have multiple platform architectures, " + + "but can only have one for this model ([%s]); " + + "detected architectures: %s", + modelToAdd.get().getModelId(), + mlNodesArchitectures + ); + updatedState = callSetToStopping(reasonToStop, modelToAdd.get().getDeploymentId(), clusterState); + } + return updatedState; + } + + ClusterState callSetToStopping(String reasonToStop, String deploymentId, ClusterState clusterState) { + return setToStopping(clusterState, deploymentId, reasonToStop); } private boolean areClusterStatesCompatibleForRebalance(ClusterState source, ClusterState target) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index 03f34dacb1faf..fcb44d0f391fe 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -164,9 +164,7 @@ public void startDeployment(TrainedModelDeploymentTask task, ActionListener getModelListener = ActionListener.wrap(getModelResponse -> { - assert getModelResponse.getResources().results().size() == 1; - TrainedModelConfig modelConfig = getModelResponse.getResources().results().get(0); + ActionListener getVerifiedModel = ActionListener.wrap((modelConfig) -> { processContext.modelInput.set(modelConfig.getInput()); if (modelConfig.getInferenceConfig() instanceof NlpConfig nlpConfig) { @@ -209,15 +207,57 @@ public void startDeployment(TrainedModelDeploymentTask task, ActionListener verifyModelAndClusterArchitecturesListener = ActionListener.wrap( + getModelResponse -> { + assert getModelResponse.getResources().results().size() == 1; + TrainedModelConfig modelConfig = getModelResponse.getResources().results().get(0); + + 
verifyMlNodesAndModelArchitectures(modelConfig, client, threadPool, getVerifiedModel); + + }, + failedDeploymentListener::onFailure + ); + executeAsyncWithOrigin( client, ML_ORIGIN, GetTrainedModelsAction.INSTANCE, new GetTrainedModelsAction.Request(task.getParams().getModelId()), - getModelListener + verifyModelAndClusterArchitecturesListener ); } + void verifyMlNodesAndModelArchitectures( + TrainedModelConfig configToReturn, + Client client, + ThreadPool threadPool, + ActionListener configToReturnListener + ) { + ActionListener verifyConfigListener = new ActionListener() { + @Override + public void onResponse(TrainedModelConfig config) { + assert Objects.equals(config, configToReturn); + configToReturnListener.onResponse(configToReturn); + } + + @Override + public void onFailure(Exception e) { + configToReturnListener.onFailure(e); + } + }; + + callVerifyMlNodesAndModelArchitectures(configToReturn, verifyConfigListener, client, threadPool); + } + + void callVerifyMlNodesAndModelArchitectures( + TrainedModelConfig configToReturn, + ActionListener configToReturnListener, + Client client, + ThreadPool threadPool + ) { + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(configToReturnListener, client, threadPool, configToReturn); + } + private SearchRequest vocabSearchRequest(VocabularyConfig vocabularyConfig, String modelId) { return client.prepareSearch(vocabularyConfig.getIndex()) .setQuery(new IdsQueryBuilder().addIds(VocabularyConfig.docId(modelId))) @@ -394,11 +434,11 @@ class ProcessContext { private final PyTorchResultProcessor resultProcessor; private final PyTorchStateStreamer stateStreamer; private final PriorityProcessWorkerExecutorService priorityProcessWorker; + private final AtomicInteger rejectedExecutionCount = new AtomicInteger(); + private final AtomicInteger timeoutCount = new AtomicInteger(); private volatile Instant startTime; private volatile Integer numThreadsPerAllocation; private volatile Integer numAllocations; - private final 
AtomicInteger rejectedExecutionCount = new AtomicInteger(); - private final AtomicInteger timeoutCount = new AtomicInteger(); private volatile boolean isStopped; private static final TimeValue COMPLETION_TIMEOUT = TimeValue.timeValueMinutes(3); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtil.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtil.java new file mode 100644 index 0000000000000..ff8ac1dbb3eec --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtil.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.deployment; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.monitor.os.OsInfo; +import org.elasticsearch.plugins.Platforms; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.util.Iterator; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static 
org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class MlPlatformArchitecturesUtil { + + public static void getMlNodesArchitecturesSet(ActionListener> architecturesListener, Client client, ThreadPool threadPool) { + ActionListener listener = MlPlatformArchitecturesUtil.getArchitecturesSetFromNodesInfoResponseListener( + threadPool, + architecturesListener + ); + + NodesInfoRequest request = MlPlatformArchitecturesUtil.getNodesInfoBuilderWithMlNodeArchitectureInfo(client).request(); + executeAsyncWithOrigin(client, ML_ORIGIN, NodesInfoAction.INSTANCE, request, listener); + } + + static ActionListener getArchitecturesSetFromNodesInfoResponseListener( + ThreadPool threadPool, + ActionListener> architecturesListener + ) { + return ActionListener.wrap(nodesInfoResponse -> { + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { + architecturesListener.onResponse(getArchitecturesSetFromNodesInfoResponse(nodesInfoResponse)); + }); + }, architecturesListener::onFailure); + } + + static NodesInfoRequestBuilder getNodesInfoBuilderWithMlNodeArchitectureInfo(Client client) { + return client.admin().cluster().prepareNodesInfo().clear().setNodesIds("ml:true").setOs(true).setPlugins(true); + } + + private static Set getArchitecturesSetFromNodesInfoResponse(NodesInfoResponse nodesInfoResponse) { + return nodesInfoResponse.getNodes() + .stream() + .filter(node -> node.getNode().hasRole(DiscoveryNodeRole.ML_ROLE.roleName())) + .map(node -> { + OsInfo osInfo = node.getInfo(OsInfo.class); + return Platforms.platformName(osInfo.getName(), osInfo.getArch()); + }) + .collect(Collectors.toUnmodifiableSet()); + } + + public static void verifyMlNodesAndModelArchitectures( + ActionListener successOrFailureListener, + Client client, + ThreadPool threadPool, + TrainedModelConfig configToReturn + ) { + String modelID = configToReturn.getModelId(); + String modelPlatformArchitecture = configToReturn.getPlatformArchitecture(); + + String 
modifiedPlatformArchitecture = (modelPlatformArchitecture == null && modelID.contains("linux-x86_64")) + ? "linux-x86_64" + : null; + ActionListener> architecturesListener = ActionListener.wrap((architectures) -> { + verifyMlNodesAndModelArchitectures(architectures, modifiedPlatformArchitecture, modelID); + successOrFailureListener.onResponse(configToReturn); + }, successOrFailureListener::onFailure); + + getMlNodesArchitecturesSet(architecturesListener, client, threadPool); + } + + static void verifyMlNodesAndModelArchitectures(Set architectures, String modelPlatformArchitecture, String modelID) + throws IllegalArgumentException, IllegalStateException { + + String architecture = null; + Iterator architecturesIterator = architectures.iterator(); + // If there are no ML nodes at all in the current cluster we assume that any that are added later will work + if (modelPlatformArchitecture == null || architectures.isEmpty() || architecturesIterator.hasNext() == false) { + return; + } + + if (architectures.size() > 1) { + throw new IllegalStateException( + format( + "ML nodes in this cluster have multiple platform architectures, but can only have one for this model ([%s]); " + + "expected [%s]; " + + "but was %s", + modelID, + modelPlatformArchitecture, + architectures + ) + ); + } + + if (Objects.equals(architecturesIterator.next(), modelPlatformArchitecture) == false) { + + throw new IllegalArgumentException( + format( + "The model being deployed ([%s]) is platform specific and incompatible with ML nodes in the cluster; " + + "expected [%s]; " + + "but was %s", + modelID, + modelPlatformArchitecture, + architectures + ) + ); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index ba3a52df5cdb6..14a9474a0c787 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -11,7 +11,7 @@ import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; @@ -666,7 +666,7 @@ public void revertSnapshot( // Step 3. After the model size stats is persisted, also persist the snapshot's quantiles and respond // ------- - CheckedConsumer modelSizeStatsResponseHandler = response -> { + CheckedConsumer modelSizeStatsResponseHandler = response -> { // In case we are reverting to the empty snapshot the quantiles will be null if (modelSnapshot.getQuantiles() == null) { actionListener.onResponse(new RevertModelSnapshotAction.Response(modelSnapshot)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java index 2997af2e5a1a8..a24e671d1fe25 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.autoscaling.NativeMemoryCapacity; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; @@ -346,7 +347,7 @@ static String nodeNameOrId(DiscoveryNode node) { public static String nodeNameAndVersion(DiscoveryNode node) { String nodeNameOrID = 
nodeNameOrId(node); StringBuilder builder = new StringBuilder("{").append(nodeNameOrID).append('}'); - builder.append('{').append("version=").append(node.getVersion()).append('}'); + builder.append('{').append("ML config version=").append(MlConfigVersion.fromNode(node)).append('}'); return builder.toString(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index 5a00bb748fa4e..eababb5fab02c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -120,7 +119,7 @@ public JobConfigProvider(Client client, NamedXContentRegistry xContentRegistry) * @param job The anomaly detector job configuration * @param listener Index response listener */ - public void putJob(Job job, ActionListener listener) { + public void putJob(Job job, ActionListener listener) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = job.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName()).id(Job.documentId(job.getId())) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java index c2661fc933ca9..e65e0abaca2e3 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteResponse.Result; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -338,7 +339,7 @@ public void persistQuantiles(Quantiles quantiles, Supplier shouldRetry) /** * Persist the quantiles (async) */ - public void persistQuantiles(Quantiles quantiles, WriteRequest.RefreshPolicy refreshPolicy, ActionListener listener) { + public void persistQuantiles(Quantiles quantiles, WriteRequest.RefreshPolicy refreshPolicy, ActionListener listener) { String quantilesDocId = Quantiles.documentId(quantiles.getJobId()); // Step 2: Create or update the quantiles document: @@ -414,7 +415,7 @@ public void persistModelSizeStats(ModelSizeStats modelSizeStats, Supplier listener + ActionListener listener ) { String jobId = modelSizeStats.getJobId(); logger.trace("[{}] Persisting model size stats, for size {}", jobId, modelSizeStats.getModelBytes()); @@ -573,7 +574,7 @@ void persist(Supplier shouldRetry, boolean requireAlias, ActionListener } } - void persistWithoutRetries(ActionListener listener, boolean requireAlias) { + void persistWithoutRetries(ActionListener listener, boolean requireAlias) { logCall(); try (XContentBuilder content = toXContentBuilder(object, params)) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index b19c0fb670a59..15b1993dc0586 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -179,7 +179,7 @@ public static String nodeFilter(DiscoveryNode node, Job job) { + jobId + "] on node [" + JobNodeSelector.nodeNameAndVersion(node) - + "], because the job's model snapshot requires a node of version [" + + "], because the job's model snapshot requires a node with ML config version [" + job.getModelSnapshotMinVersion() + "] or higher"; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java index 310f7d22f1136..1e662e28cf564 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java @@ -13,7 +13,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction; @@ -74,6 +74,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient } } - return channel -> client.execute(GetCalendarsAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute(GetCalendarsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java index e2b20566a4989..b4c76c3fa4475 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java @@ -14,7 +14,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; @@ -62,7 +62,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient return channel -> new RestCancellableNodeClient(client, restRequest.getHttpChannel()).execute( GetFiltersAction.INSTANCE, request, - new RestStatusToXContentListener<>(channel) + new RestToXContentListener<>(channel) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java index 1475b64e381ea..ae7b26ebad0e4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java @@ -138,7 +138,7 @@ public RestResponse buildResponse(T response, XContentBuilder builder) throws Ex includes.forEach(include -> params.put(include, "true")); params.put(ToXContentParams.FOR_INTERNAL_STORAGE, "false"); response.toXContent(builder, new ToXContent.MapParams(params)); - return new RestResponse(getStatus(response), builder); + return new 
RestResponse(statusFunction.apply(response), builder); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java index 8ddfe39188fa8..fa9a15b246f84 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java @@ -10,7 +10,8 @@ import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ml.action.PostDataAction; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -48,7 +49,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient request.setResetEnd(restRequest.param(PostDataAction.Request.RESET_END.getPreferredName(), DEFAULT_RESET_END)); request.setContent(restRequest.content(), restRequest.getXContentType()); - return channel -> client.execute(PostDataAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute(PostDataAction.INSTANCE, request, new RestToXContentListener<>(channel, r -> RestStatus.ACCEPTED)); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java index e33bdf2d100e6..f1411db1e99e8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java @@ -12,7 +12,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -68,6 +68,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient request.timeout(restRequest.paramAsTime("timeout", request.timeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); - return channel -> client.execute(RevertModelSnapshotAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute(RevertModelSnapshotAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java index 45d5536a9f1c6..911305aca474c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java @@ -12,7 +12,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.XContentParser; import 
org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -55,10 +55,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient parser ); - return channel -> client.execute( - UpdateModelSnapshotAction.INSTANCE, - updateModelSnapshot, - new RestStatusToXContentListener<>(channel) - ); + return channel -> client.execute(UpdateModelSnapshotAction.INSTANCE, updateModelSnapshot, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java index d99147e6b4e98..f8755b282c6a1 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java @@ -11,13 +11,16 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfigTests; +import java.net.InetAddress; import java.util.Map; import static org.mockito.Mockito.mock; @@ -31,12 +34,14 @@ public void testValidateMinimumVersion() { .setMinimumVersion("9999.0.0") .build(); - DiscoveryNode node = mock(DiscoveryNode.class); final Map attributes = 
Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.CURRENT.toString()); - when(node.getAttributes()).thenReturn(attributes); - when(node.getVersion()).thenReturn(Version.CURRENT); - when(node.getMinIndexVersion()).thenReturn(IndexVersion.current()); - when(node.getId()).thenReturn("node1"); + DiscoveryNode node = DiscoveryNodeUtils.create( + "node1name", + "node1", + new TransportAddress(InetAddress.getLoopbackAddress(), 9301), + attributes, + DiscoveryNodeRole.roles() + ); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node).build(); @@ -52,7 +57,7 @@ public void testValidateMinimumVersion() { assertEquals( "Validation Failed: 1: The model [" + packageConfig.getPackagedModelId() - + "] requires that all nodes are at least version [9999.0.0];", + + "] requires that all nodes have ML config version [9999.0.0] or higher;", e.getMessage() ); } @@ -63,12 +68,11 @@ public void testValidateMinimumVersion() { ModelPackageConfigTests.randomModulePackageConfig() ).setMinimumVersion(MlConfigVersion.CURRENT.toString()).build(); - DiscoveryNode node = mock(DiscoveryNode.class); - final Map attributes = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_8_7_0.toString()); - when(node.getAttributes()).thenReturn(attributes); - when(node.getVersion()).thenReturn(Version.V_8_7_0); - when(node.getMinIndexVersion()).thenReturn(IndexVersion.current()); - when(node.getId()).thenReturn("node1"); + DiscoveryNode node = DiscoveryNodeUtils.create( + "node1", + new TransportAddress(InetAddress.getLoopbackAddress(), 9300), + Version.V_8_7_0 + ); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node).build(); @@ -82,9 +86,9 @@ public void testValidateMinimumVersion() { assertEquals( "Validation Failed: 1: The model [" + packageConfigCurrent.getPackagedModelId() - + "] requires that all nodes are at least version [" + + "] requires that all nodes have ML config version [" + MlConfigVersion.CURRENT - + "];", + + "] or higher;", e.getMessage() ); } @@ 
-95,12 +99,11 @@ public void testValidateMinimumVersion() { ModelPackageConfigTests.randomModulePackageConfig() ).setMinimumVersion("_broken_version_").build(); - DiscoveryNode node = mock(DiscoveryNode.class); - final Map attributes = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_8_7_0.toString()); - when(node.getAttributes()).thenReturn(attributes); - when(node.getVersion()).thenReturn(Version.V_8_7_0); - when(node.getMinIndexVersion()).thenReturn(IndexVersion.current()); - when(node.getId()).thenReturn("node1"); + DiscoveryNode node = DiscoveryNodeUtils.create( + "node1", + new TransportAddress(InetAddress.getLoopbackAddress(), 9300), + Version.V_8_7_0 + ); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node).build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java index 514a1e2243531..f708ef1fb2959 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelActionTests.java @@ -11,9 +11,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; @@ -21,6 +24,7 @@ import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -50,6 +54,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigTests; import org.junit.After; import org.junit.Before; +import org.mockito.ArgumentCaptor; import java.io.IOException; import java.util.Collections; @@ -65,6 +70,11 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; public class TransportPutTrainedModelActionTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); @@ -205,6 +215,42 @@ public void testCheckForExistingTaskReturnsTask() { assertThat(returnedModel.getResponse().getModelId(), is(trainedModel.getModelId())); } + public void testVerifyMlNodesAndModelArchitectures_GivenIllegalArgumentException_ThenSetHeaderWarning() { + + TransportPutTrainedModelAction actionSpy = spy(createTransportPutTrainedModelAction()); + @SuppressWarnings("unchecked") + ArgumentCaptor> failureListener = ArgumentCaptor.forClass(ActionListener.class); + @SuppressWarnings("unchecked") + ActionListener mockConfigToReturnListener = mock(ActionListener.class); + TrainedModelConfig mockConfigToReturn = mock(TrainedModelConfig.class); + doNothing().when(mockConfigToReturnListener).onResponse(any()); + + doNothing().when(actionSpy).callVerifyMlNodesAndModelArchitectures(any(), any(), any(), any()); + 
actionSpy.verifyMlNodesAndModelArchitectures(mockConfigToReturn, null, threadPool, mockConfigToReturnListener); + verify(actionSpy).verifyMlNodesAndModelArchitectures(any(), any(), any(), any()); + verify(actionSpy).callVerifyMlNodesAndModelArchitectures(any(), failureListener.capture(), any(), any()); + + String warningMessage = "TEST HEADER WARNING"; + failureListener.getValue().onFailure(new IllegalArgumentException(warningMessage)); + assertWarnings(warningMessage); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenArchitecturesMatch_ThenTriggerOnResponse() { + + TransportPutTrainedModelAction actionSpy = spy(createTransportPutTrainedModelAction()); + @SuppressWarnings("unchecked") + ArgumentCaptor> successListener = ArgumentCaptor.forClass(ActionListener.class); + @SuppressWarnings("unchecked") + ActionListener mockConfigToReturnListener = mock(ActionListener.class); + TrainedModelConfig mockConfigToReturn = mock(TrainedModelConfig.class); + + doNothing().when(actionSpy).callVerifyMlNodesAndModelArchitectures(any(), any(), any(), any()); + actionSpy.verifyMlNodesAndModelArchitectures(mockConfigToReturn, null, threadPool, mockConfigToReturnListener); + verify(actionSpy).callVerifyMlNodesAndModelArchitectures(any(), successListener.capture(), any(), any()); + + ensureNoWarnings(); + } + private static void prepareGetTrainedModelResponse(Client client, List trainedModels) { doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") @@ -220,6 +266,30 @@ private static void prepareGetTrainedModelResponse(Client client, List architecturesSet = new HashSet<>(randomList(0, 1, () -> randomAlphaOfLength(10))); + + final ActionListener> underTestListener = TrainedModelAssignmentClusterService.getArchitecturesSetActionListener(); + + underTestListener.onResponse(architecturesSet); + + LogEvent lastEvent = appender.getLastEventAndReset(); + assertNull(lastEvent); + } + + public void testLogMlNodeHeterogeneity_GivenTwoArchitecture_ThenWarn() throws 
InterruptedException { + String nodeArch = randomAlphaOfLength(10); + Set architecturesSet = Set.of(nodeArch, nodeArch + "2"); // architectures must be different + + final ActionListener> underTestListener = TrainedModelAssignmentClusterService.getArchitecturesSetActionListener(); + underTestListener.onResponse(architecturesSet); + + LogEvent lastEvent = appender.getLastEventAndReset(); + + assertEquals(Level.WARN, lastEvent.getLevel()); + + Message m = lastEvent.getMessage(); + String fm = m.getFormattedMessage(); + String expected = Strings.format( + "Heterogeneous platform architectures were detected among ML nodes. " + + "This will prevent the deployment of some trained models. Distinct platform architectures detected: %s", + architecturesSet + ); + + assertEquals(expected, fm); + } + + public void testLogMlNodeHeterogeneity_GivenFailure_ThenError() throws InterruptedException { + RuntimeException e = new RuntimeException("Test Runtime Exception"); + final ActionListener> underTestListener = TrainedModelAssignmentClusterService.getArchitecturesSetActionListener(); + underTestListener.onFailure(e); + + LogEvent lastEvent = appender.getLastEventAndReset(); + + assertEquals(Level.ERROR, lastEvent.getLevel()); + + Message m = lastEvent.getMessage(); + String fm = m.getFormattedMessage(); + + assertEquals("Failed to detect heterogeneity among ML nodes with exception: ", fm); + assertEquals(e, lastEvent.getThrown()); + } + + public void testClusterChanged_GivenNodesAdded_ThenLogMlNodeHeterogeneityCalled() { + nodeAvailabilityZoneMapper = mock(NodeAvailabilityZoneMapper.class); + TrainedModelAssignmentClusterService serviceSpy = spy(createClusterService(randomInt(5))); + doNothing().when(serviceSpy).logMlNodeHeterogeneity(); + doReturn(false).when(serviceSpy).eventStateHasGlobalBlockStateNotRecoveredBlock(any()); + doReturn(false).when(serviceSpy).eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(any()); + + ClusterChangedEvent 
mockNodesAddedEvent = mock(ClusterChangedEvent.class); + ClusterState mockState = mock(ClusterState.class); + doReturn(mockState).when(mockNodesAddedEvent).state(); + Metadata mockMetadata = mock(Metadata.class); + doReturn(mockMetadata).when(mockState).getMetadata(); + doReturn(null).when(mockState).custom(anyString()); + + doReturn(true).when(mockNodesAddedEvent).localNodeMaster(); + doReturn(true).when(mockNodesAddedEvent).nodesAdded(); + + serviceSpy.clusterChanged(mockNodesAddedEvent); + Mockito.verify(serviceSpy).logMlNodeHeterogeneity(); + Mockito.verify(mockNodesAddedEvent).nodesAdded(); + } + + public void testStopPlatformSpecificModelsInHeterogeneousClusters_GivenMultipleMlNodeArchitectures_ThenCallSetToStopping() { + nodeAvailabilityZoneMapper = mock(NodeAvailabilityZoneMapper.class); + TrainedModelAssignmentClusterService serviceSpy = spy(createClusterService(randomInt(5))); + + Set architecturesSet = new HashSet<>(randomList(2, 5, () -> randomAlphaOfLength(10))); + ClusterState mockUpdatedState = mock(ClusterState.class); + ClusterState mockClusterState = mock(ClusterState.class); + StartTrainedModelDeploymentAction.TaskParams mockModelToAdd = mock(StartTrainedModelDeploymentAction.TaskParams.class); + Optional optionalModelToAdd = Optional.of(mockModelToAdd); + String modelId = randomAlphaOfLength(10); + String deploymentId = randomAlphaOfLength(10); + when(mockModelToAdd.getModelId()).thenReturn(modelId); + when(mockModelToAdd.getDeploymentId()).thenReturn(deploymentId); + + String reasonToStop = format( + "ML nodes in this cluster have multiple platform architectures, " + + "but can only have one for this model ([%s]); " + + "detected architectures: %s", + modelId, + architecturesSet + ); + + doReturn(mockUpdatedState).when(serviceSpy).callSetToStopping(reasonToStop, deploymentId, mockClusterState); + + ClusterState updatedMockClusterState = serviceSpy.stopPlatformSpecificModelsInHeterogeneousClusters( + mockUpdatedState, + architecturesSet, + 
optionalModelToAdd, + mockClusterState + ); + + verify(serviceSpy).callSetToStopping(reasonToStop, deploymentId, mockClusterState); } public void testUpdateModelRoutingTable() { @@ -1878,7 +2019,8 @@ private TrainedModelAssignmentClusterService createClusterService(int maxLazyNod threadPool, nodeLoadDetector, systemAuditor, - nodeAvailabilityZoneMapper + nodeAvailabilityZoneMapper, + client ); } @@ -1948,4 +2090,36 @@ private static StartTrainedModelDeploymentAction.TaskParams newParams( ); } + protected void assertAsync( + Consumer> function, + T expected, + CheckedConsumer onAnswer, + Consumer onException + ) throws InterruptedException { + + CountDownLatch latch = new CountDownLatch(1); + + LatchedActionListener listener = new LatchedActionListener<>(ActionListener.wrap(r -> { + if (expected == null) { + fail("expected an exception but got a response"); + } else { + assertThat(r, equalTo(expected)); + } + if (onAnswer != null) { + onAnswer.accept(r); + } + }, e -> { + if (onException == null) { + logger.error("got unexpected exception", e); + fail("got unexpected exception: " + e.getMessage()); + } else { + onException.accept(e); + } + }), latch); + + function.accept(listener); + latch.countDown(); + assertTrue("timed out after 20s", latch.await(20, TimeUnit.SECONDS)); + } + } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java index 0bc898f434030..028c4b48ad355 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManagerTests.java @@ -108,4 +108,5 @@ public void testRejectedExecution() { assertThat(rejectedCount.intValue(), equalTo(1)); } + } diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtilTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtilTests.java new file mode 100644 index 0000000000000..28fc3db10cbe8 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/MlPlatformArchitecturesUtilTests.java @@ -0,0 +1,202 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.deployment; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.monitor.os.OsInfo; +import org.elasticsearch.plugins.Platforms; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class 
MlPlatformArchitecturesUtilTests extends ESTestCase { + + public void testGetNodesOsArchitectures() throws InterruptedException { + var threadPool = mock(ThreadPool.class); + var mockExectutorServervice = mock(ExecutorService.class); + doNothing().when(mockExectutorServervice).execute(any()); + when(threadPool.executor(anyString())).thenReturn(mockExectutorServervice); + + var mockNodesInfoResponse = mock(NodesInfoResponse.class); + List nodeInfoList = randomNodeInfos(4); + when(mockNodesInfoResponse.getNodes()).thenReturn(nodeInfoList); + + var expected = nodeInfoList.stream().filter(node -> node.getNode().hasRole(DiscoveryNodeRole.ML_ROLE.roleName())).map(node -> { + OsInfo osInfo = node.getInfo(OsInfo.class); + return Platforms.platformName(osInfo.getName(), osInfo.getArch()); + }).collect(Collectors.toUnmodifiableSet()); + + assertAsync(new Consumer>>() { + @Override + public void accept(ActionListener> setActionListener) { + final ActionListener nodesInfoResponseActionListener = MlPlatformArchitecturesUtil + .getArchitecturesSetFromNodesInfoResponseListener(threadPool, setActionListener); + nodesInfoResponseActionListener.onResponse(mockNodesInfoResponse); + } + + }, expected, null, null); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenNullModelArchitecture_ThenNothing() { + var architectures = nArchitectures(randomIntBetween(2, 10)); + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(architectures, null, randomAlphaOfLength(10)); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenZeroArches_ThenNothing() { + var architectures = new HashSet(); + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(architectures, randomAlphaOfLength(10), randomAlphaOfLength(10)); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenOneArchMatches_ThenNothing() { + Set architectures = nArchitectures(1); + String architecture = architectures.iterator().next(); + 
MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(architectures, architecture, randomAlphaOfLength(10)); + } + + public void testVerifyMlNodesAndModelArchitectures_GivenAtLeastTwoArches_ThenThrowsISE() { + var architectures = nArchitectures(randomIntBetween(2, 10)); + var modelId = randomAlphaOfLength(10); + var requiredArch = randomAlphaOfLength(10); + String message = "ML nodes in this cluster have multiple platform architectures, " + + "but can only have one for this model ([" + + modelId + + "]); " + + "expected [" + + requiredArch + + "]; but was " + + architectures + + ""; + + Throwable exception = expectThrows( + IllegalStateException.class, + "Expected IllegalStateException but no exception was thrown", + () -> MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(architectures, requiredArch, modelId) + ); + assertEquals(exception.getMessage(), message); + } + + public void testVerifyArchitectureMatchesModelPlatformArchitecture_GivenRequiredArchMatches_ThenNothing() { + var requiredArch = randomAlphaOfLength(10); + + var modelId = randomAlphaOfLength(10); + + MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures( + new HashSet<>(Collections.singleton(requiredArch)), + requiredArch, + modelId + ); + } + + public void testVerifyArchitectureMatchesModelPlatformArchitecture_GivenRequiredArchDoesNotMatch_ThenThrowsIAE() { + var requiredArch = randomAlphaOfLength(10); + String architecturesStr = requiredArch + "-DIFFERENT"; + + var modelId = randomAlphaOfLength(10); + String message = "The model being deployed ([" + + modelId + + "]) is platform specific and incompatible with ML nodes in the cluster; " + + "expected [" + + requiredArch + + "]; but was [" + + architecturesStr + + "]"; + + Throwable exception = expectThrows( + IllegalArgumentException.class, + "Expected IllegalArgumentException but no exception was thrown", + () -> MlPlatformArchitecturesUtil.verifyMlNodesAndModelArchitectures(Set.of(architecturesStr), requiredArch, 
modelId) + ); + assertEquals(exception.getMessage(), message); + } + + private Set nArchitectures(Integer n) { + Set architectures = new HashSet(n); + for (int i = 0; i < n; i++) { + architectures.add(randomAlphaOfLength(10)); + } + return architectures; + } + + private List randomNodeInfos(int max) { + assertTrue(max > 0); + int n = randomInt(max); + List nodeInfos = new ArrayList<>(n); + for (int i = 0; i < n; i++) { + nodeInfos.add(mockNodeInfo()); + } + return nodeInfos; + } + + private NodeInfo mockNodeInfo() { + var mockNodeInfo = mock(NodeInfo.class); + var mockDiscoveryNode = mock(DiscoveryNode.class); + when(mockNodeInfo.getNode()).thenReturn(mockDiscoveryNode); + when(mockDiscoveryNode.hasRole(DiscoveryNodeRole.ML_ROLE.roleName())).thenReturn(randomBoolean()); + var mockOsInfo = mock(OsInfo.class); + when(mockNodeInfo.getInfo(OsInfo.class)).thenReturn(mockOsInfo); + when(mockOsInfo.getArch()).thenReturn(randomAlphaOfLength(10)); + when(mockOsInfo.getName()).thenReturn(randomAlphaOfLength(10)); + + return mockNodeInfo; + } + + protected void assertAsync( + Consumer> function, + T expected, + CheckedConsumer onAnswer, + Consumer onException + ) throws InterruptedException { + + CountDownLatch latch = new CountDownLatch(1); + + LatchedActionListener listener = new LatchedActionListener<>(ActionListener.wrap(r -> { + if (expected == null) { + fail("expected an exception but got a response"); + } else { + assertThat(r, equalTo(expected)); + } + if (onAnswer != null) { + onAnswer.accept(r); + } + }, e -> { + if (onException == null) { + logger.error("got unexpected exception", e); + fail("got unexpected exception: " + e.getMessage()); + } else { + onException.accept(e); + } + }), latch); + + function.accept(listener); + latch.countDown(); + assertTrue("timed out after 20s", latch.await(20, TimeUnit.SECONDS)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java index 19546b37c00cd..ab815aad543b8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java @@ -42,7 +42,6 @@ import java.util.Collection; import java.util.Collections; import java.util.Date; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -81,12 +80,25 @@ public void setup() { when(memoryTracker.getJobMemoryRequirement(anyString(), anyString())).thenReturn(JOB_MEMORY_REQUIREMENT.getBytes()); } - public void testNodeNameAndVersion() { + public void testNodeNameAndVersionForRecentNode() { TransportAddress ta = new TransportAddress(InetAddress.getLoopbackAddress(), 9300); - Map attributes = new HashMap<>(); - attributes.put("unrelated", "attribute"); + Map attributes = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, "10.0.0", "unrelated", "attribute"); DiscoveryNode node = DiscoveryNodeUtils.create("_node_name1", "_node_id1", ta, attributes, ROLES_WITHOUT_ML); - assertEquals("{_node_name1}{version=" + node.getVersion() + "}", JobNodeSelector.nodeNameAndVersion(node)); + assertEquals("{_node_name1}{ML config version=10.0.0}", JobNodeSelector.nodeNameAndVersion(node)); + } + + public void testNodeNameAndVersionForOldNode() { + TransportAddress ta = new TransportAddress(InetAddress.getLoopbackAddress(), 9300); + Map attributes = Map.of("unrelated", "attribute"); + DiscoveryNode node = new DiscoveryNode( + "_node_name2", + "_node_id2", + ta, + attributes, + ROLES_WITH_ML, + VersionInformation.inferVersions(Version.V_8_7_0) + ); + assertEquals("{_node_name2}{ML config version=8.7.0}", JobNodeSelector.nodeNameAndVersion(node)); } public void testNodeNameAndMlAttributes() { @@ -869,12 +881,12 @@ public void testSelectLeastLoadedMlNode_reasonsAreInDeterministicOrder() { assertThat( 
result.getExplanation(), equalTo( - "Not opening job [incompatible_type_job] on node [{_node_name1}{version=" - + Version.CURRENT + "Not opening job [incompatible_type_job] on node [{_node_name1}{ML config version=" + + MlConfigVersion.CURRENT + "}], " + "because this node does not support jobs of type [incompatible_type]|" - + "Not opening job [incompatible_type_job] on node [{_node_name2}{version=" - + Version.CURRENT + + "Not opening job [incompatible_type_job] on node [{_node_name2}{ML config version=" + + MlConfigVersion.CURRENT + "}], " + "because this node does not support jobs of type [incompatible_type]" ) @@ -946,7 +958,10 @@ public void testSelectLeastLoadedMlNode_noNodesMatchingModelSnapshotMinVersion() node -> nodeFilter(node, job) ); PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(10, 2, 30, MAX_JOB_BYTES, false); - assertThat(result.getExplanation(), containsString("job's model snapshot requires a node of version [7.3.0] or higher")); + assertThat( + result.getExplanation(), + containsString("job's model snapshot requires a node with ML config version [7.3.0] or higher") + ); assertNull(result.getExecutorNode()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index 2dc8b77fc5834..fe82cfcb00d2c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.job.persistence; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; @@ -375,7 +376,7 @@ 
private void testPersistQuantilesAsync(SearchHits searchHits, String expectedInd doAnswer(withResponse(indexResponse)).when(client).execute(eq(IndexAction.INSTANCE), any(), any()); Quantiles quantiles = new Quantiles("foo", new Date(), "bar"); - ActionListener indexResponseListener = mock(ActionListener.class); + ActionListener indexResponseListener = mock(ActionListener.class); persister.persistQuantiles(quantiles, WriteRequest.RefreshPolicy.IMMEDIATE, indexResponseListener); InOrder inOrder = inOrder(client, indexResponseListener); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/MockAppender.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/MockAppender.java new file mode 100644 index 0000000000000..99c3c58f4ee81 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/MockAppender.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.test; + +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.filter.RegexFilter; +import org.apache.logging.log4j.message.Message; + +public class MockAppender extends AbstractAppender { + public LogEvent lastEvent; + + public MockAppender(final String name) throws IllegalAccessException { + super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null, false); + } + + @Override + public void append(LogEvent event) { + lastEvent = event.toImmutable(); + } + + Message lastMessage() { + return lastEvent.getMessage(); + } + + public LogEvent getLastEventAndReset() { + LogEvent toReturn = lastEvent; + lastEvent = null; + return toReturn; + } +} diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java index f15925b7c891b..0fb3bf540a4ed 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.profiling; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; @@ -72,7 +72,7 @@ protected boolean ignoreExternalCluster() { } private void indexDoc(String index, String id, Map source) { - IndexResponse indexResponse = 
client().prepareIndex(index).setId(id).setSource(source).setCreate(true).get(); + DocWriteResponse indexResponse = client().prepareIndex(index).setId(id).setSource(source).setCreate(true).get(); assertEquals(RestStatus.CREATED, indexResponse.status()); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java index d9e3dff616671..f246a34f3362d 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java @@ -57,6 +57,11 @@ public EventsIndex getResampledIndex(long targetSampleSize, long currentSampleSi return EventsIndex.getSampledIndex(targetSampleSize, currentSampleSize, this.getExponent()); } + @Override + public String toString() { + return name; + } + // Return the index that has between targetSampleSize..targetSampleSize*samplingFactor entries. // The starting point is the number of entries from the profiling-events-5pow index. 
private static EventsIndex getSampledIndex(long targetSampleSize, long sampleCountFromInitialExp, int initialExp) { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java index 8566978decaa8..ec8c85d39015e 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java @@ -14,8 +14,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -29,7 +29,7 @@ protected GetStatusAction() { super(NAME, GetStatusAction.Response::new); } - public static class Response extends ActionResponse implements StatusToXContentObject { + public static class Response extends ActionResponse implements ToXContentObject { private boolean profilingEnabled; private boolean resourceManagementEnabled; @@ -102,7 +102,6 @@ public String toString() { return Strings.toString(this, true, true); } - @Override public RestStatus status() { return timedOut ? 
RestStatus.REQUEST_TIMEOUT : RestStatus.OK; } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java index 714181f3dc0b5..e20d7a595d86d 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java @@ -10,7 +10,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import java.util.List; @@ -34,6 +34,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient request.timeout(restRequest.paramAsTime("timeout", request.timeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); request.waitForResourcesCreated(restRequest.paramAsBoolean("wait_for_resources_created", false)); - return channel -> client.execute(GetStatusAction.INSTANCE, request, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute( + GetStatusAction.INSTANCE, + request, + new RestToXContentListener<>(channel, GetStatusAction.Response::status) + ); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 3a3c37b04b3d5..3b86e9040a9d3 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -129,6 +129,13 @@ protected void 
doExecute(Task submitTask, GetStackTracesRequest request, ActionL .execute(ActionListener.wrap(searchResponse -> { long sampleCount = searchResponse.getHits().getTotalHits().value; EventsIndex resampledIndex = mediumDownsampled.getResampledIndex(request.getSampleSize(), sampleCount); + log.debug( + "User requested [{}] samples, [{}] samples matched in [{}]. Picking [{}]", + request.getSampleSize(), + sampleCount, + mediumDownsampled, + resampledIndex + ); log.debug("getResampledIndex took [" + (System.nanoTime() - start) / 1_000_000.0d + " ms]."); searchEventGroupByStackTrace(client, request, resampledIndex, submitListener); }, e -> { @@ -184,14 +191,23 @@ private void searchEventGroupByStackTrace( // sort items lexicographically to access Lucene's term dictionary more efficiently when issuing an mget request. // The term dictionary is lexicographically sorted and using the same order reduces the number of page faults // needed to load it. + long totalFinalCount = 0; Map stackTraceEvents = new TreeMap<>(); for (StringTerms.Bucket bucket : stacktraces.getBuckets()) { Sum count = bucket.getAggregations().get("count"); int finalCount = resampler.adjustSampleCount((int) count.value()); + totalFinalCount += finalCount; if (finalCount > 0) { stackTraceEvents.put(bucket.getKeyAsString(), finalCount); } } + log.debug( + "Found [{}] stacktrace events, resampled with sample rate [{}] to [{}] events ([{}] unique stack traces).", + totalCount, + eventsIndex.getSampleRate(), + totalFinalCount, + stackTraceEvents.size() + ); log.debug("searchEventGroupByStackTrace took [" + (System.nanoTime() - start) / 1_000_000.0d + " ms]."); if (stackTraceEvents.isEmpty() == false) { responseBuilder.setStart(Instant.ofEpochMilli(minTime)); @@ -304,6 +320,12 @@ public void onResponse(MultiGetResponse multiGetItemResponses) { if (this.remainingSlices.decrementAndGet() == 0) { responseBuilder.setStackTraces(stackTracePerId); responseBuilder.setTotalFrames(totalFrames.get()); + log.debug( + 
"retrieveStackTraces found [{}] stack traces, [{}] frames, [{}] executables.", + stackTracePerId.size(), + stackFrameIds.size(), + executableIds.size() + ); log.debug("retrieveStackTraces took [" + (System.nanoTime() - start) / 1_000_000.0d + " ms]."); retrieveStackTraceDetails( clusterState, @@ -448,6 +470,7 @@ public void mayFinish() { if (expectedSlices.decrementAndGet() == 0) { builder.setExecutables(executables); builder.setStackFrames(stackFrames); + log.debug("retrieveStackTraceDetails found [{}] stack frames, [{}] executables.", stackFrames.size(), executables.size()); log.debug("retrieveStackTraceDetails took [" + (System.nanoTime() - start) / 1_000_000.0d + " ms]."); submitListener.onResponse(builder.build()); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/QlStatusResponse.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/QlStatusResponse.java index 30e43e71c82bd..3943ddd3e207a 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/QlStatusResponse.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/QlStatusResponse.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.async.StoredAsyncResponse; import org.elasticsearch.xpack.core.search.action.SearchStatusResponse; @@ -20,12 +20,10 @@ import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.rest.RestStatus.OK; - /** * A response for *QL search status request */ -public class QlStatusResponse extends ActionResponse implements SearchStatusResponse, StatusToXContentObject { +public class QlStatusResponse 
extends ActionResponse implements SearchStatusResponse, ToXContentObject { private final String id; private final boolean isRunning; private final boolean isPartial; @@ -115,11 +113,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - @Override - public RestStatus status() { - return OK; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java index eb360257dfff1..a7019d76cdcd1 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.rank.rrf; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.io.stream.StreamInput; @@ -129,13 +129,13 @@ public void testCanMatchShard() throws IOException { int shardB = -1; for (int i = 0; i < 10; i++) { - IndexResponse ir = client().prepareIndex("value_index").setSource("value", "" + i).setRouting("a").get(); + DocWriteResponse ir = client().prepareIndex("value_index").setSource("value", "" + i).setRouting("a").get(); int a = ir.getShardId().id(); assertTrue(shardA == a || shardA == -1); shardA = a; } for (int i = 10; i < 20; i++) { - IndexResponse ir = client().prepareIndex("value_index").setSource("value", "" + i).setRouting("b").get(); + DocWriteResponse ir = client().prepareIndex("value_index").setSource("value", "" + 
i).setRouting("b").get(); int b = ir.getShardId().id(); assertTrue(shardB == b || shardB == -1); shardB = b; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java index 33e489ae20373..3662fc94e4c7c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java @@ -34,19 +34,12 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String id = restRequest.param(ID.getPreferredName()); DeleteRollupJobAction.Request request = new DeleteRollupJobAction.Request(id); - return channel -> client.execute( - DeleteRollupJobAction.INSTANCE, - request, - new RestToXContentListener(channel) { - @Override - protected RestStatus getStatus(DeleteRollupJobAction.Response response) { - if (response.getNodeFailures().size() > 0 || response.getTaskFailures().size() > 0) { - return RestStatus.INTERNAL_SERVER_ERROR; - } - return RestStatus.OK; - } + return channel -> client.execute(DeleteRollupJobAction.INSTANCE, request, new RestToXContentListener<>(channel, r -> { + if (r.getNodeFailures().size() > 0 || r.getTaskFailures().size() > 0) { + return RestStatus.INTERNAL_SERVER_ERROR; } - ); + return RestStatus.OK; + })); } @Override diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java index 8f3a0d9b64238..fd5d401ca4b4b 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.FilterBlobContainer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -462,8 +463,9 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { - delegate.deleteBlobsIgnoringIfNotExists(blobNames); + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) + throws IOException { + delegate.deleteBlobsIgnoringIfNotExists(purpose, blobNames); } @Override @@ -485,8 +487,8 @@ class TrackingFilesBlobContainer extends FilterBlobContainer { } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { - return new FilterInputStream(super.readBlob(blobName, position, length)) { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + return new FilterInputStream(super.readBlob(purpose, blobName, position, length)) { long bytesRead = 0L; @Override diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java index b8e527f14312e..ebf1296da9f55 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java @@ -14,10 +14,10 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.blobcache.common.ByteRange; @@ -274,7 +274,7 @@ public final void putAsync( final ActionListener wrappedListener = ActionListener.runAfter(listener, release); innerPut(request, new ActionListener<>() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { logger.trace("cache fill ({}): [{}]", indexResponse.status(), request.id()); wrappedListener.onResponse(null); } @@ -297,7 +297,7 @@ public void onFailure(Exception e) { } } - protected void innerPut(final IndexRequest request, final ActionListener listener) { + protected void innerPut(final IndexRequest request, final ActionListener listener) { client.index(request, listener); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java index dc1b28aa6098d..7203b45e86efa 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java +++ 
b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.FilterBlobContainer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; @@ -720,13 +721,13 @@ protected BlobContainer wrapChild(BlobContainer child) { } @Override - public InputStream readBlob(String blobName) throws IOException { - return blobStoreRepository.maybeRateLimitRestores(super.readBlob(blobName)); + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName)); } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { - return blobStoreRepository.maybeRateLimitRestores(super.readBlob(blobName, position, length)); + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName, position, length)); } } } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java index fccd85c25d69a..aab3e83a4f496 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java +++ 
b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java @@ -14,6 +14,7 @@ import org.elasticsearch.blobcache.common.BlobCacheBufferedIndexInput; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Streams; @@ -340,7 +341,7 @@ public String toString() { private InputStream openBlobStream(int part, long pos, long length) throws IOException { assert MetadataCachingIndexInput.assertCurrentThreadMayAccessBlobStore(); stats.addBlobStoreBytesRequested(length); - return blobContainer.readBlob(fileInfo.partName(part), pos, length); + return blobContainer.readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(part), pos, length); } private static class StreamForSequentialReads implements Closeable { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java index c4fa5efc1c012..2b61dc18e266c 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java @@ -20,6 +20,7 @@ import org.elasticsearch.blobcache.BlobCacheUtils; import org.elasticsearch.blobcache.common.BlobCacheBufferedIndexInput; import org.elasticsearch.blobcache.common.ByteRange; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Channels; import 
org.elasticsearch.core.Releasable; @@ -527,7 +528,7 @@ protected InputStream openInputStreamFromBlobStore(final long position, final lo assert position + readLength <= fileInfo.length() : "cannot read [" + position + "-" + (position + readLength) + "] from [" + fileInfo + "]"; stats.addBlobStoreBytesRequested(readLength); - return directory.blobContainer().readBlob(fileInfo.name(), position, readLength); + return directory.blobContainer().readBlob(OperationPurpose.SNAPSHOT, fileInfo.name(), position, readLength); } return openInputStreamMultipleParts(position, readLength); } @@ -556,7 +557,8 @@ protected InputStream openSlice(int slice) throws IOException { endInPart = currentPart == endPart ? getRelativePositionInPart(position + readLength - 1) + 1 : fileInfo.partBytes(currentPart); - return directory.blobContainer().readBlob(fileInfo.partName(currentPart), startInPart, endInPart - startInPart); + return directory.blobContainer() + .readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(currentPart), startInPart, endInPart - startInPart); } }; } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java index f3cac44d5b91b..d72a137285c6a 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.searchablesnapshots.cache.common; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; @@ -18,6 +19,7 @@ import 
org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; @@ -114,7 +116,7 @@ public static void assertCounter(IndexInputStats.Counter counter, long total, lo public static BlobContainer singleBlobContainer(final String blobName, final byte[] blobContent) { return new MostlyUnimplementedFakeBlobContainer() { @Override - public InputStream readBlob(String name, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String name, long position, long length) throws IOException { if (blobName.equals(name) == false) { throw new FileNotFoundException("Blob not found: " + name); } @@ -133,7 +135,7 @@ public static BlobContainer singleSplitBlobContainer(final String blobName, fina final String prefix = blobName + ".part"; return new MostlyUnimplementedFakeBlobContainer() { @Override - public InputStream readBlob(String name, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String name, long position, long length) throws IOException { if (name.startsWith(prefix) == false) { throw new FileNotFoundException("Blob not found: " + name); } @@ -174,7 +176,7 @@ public long readBlobPreferredLength() { } @Override - public Map listBlobs() { + public Map listBlobs(OperationPurpose purpose) { throw unsupportedException(); } @@ -184,27 +186,34 @@ public BlobPath path() { } @Override - public boolean blobExists(String blobName) { + public boolean blobExists(OperationPurpose purpose, String blobName) { throw unsupportedException(); } @Override - public InputStream readBlob(String blobName) { + public InputStream readBlob(OperationPurpose purpose, 
String blobName) { throw unsupportedException(); } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { throw unsupportedException(); } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) { + public void writeBlob( + OperationPurpose purpose, + String blobName, + InputStream inputStream, + long blobSize, + boolean failIfAlreadyExists + ) { throw unsupportedException(); } @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -214,32 +223,33 @@ public void writeMetadataBlob( } @Override - public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) { + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) { throw unsupportedException(); } @Override - public DeleteResult delete() { + public DeleteResult delete(OperationPurpose purpose) { throw unsupportedException(); } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) { throw unsupportedException(); } @Override - public Map children() { + public Map children(OperationPurpose purpose) { throw unsupportedException(); } @Override - public Map listBlobsByPrefix(String blobNamePrefix) { + public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) { throw unsupportedException(); } @Override public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, @@ -271,7 +281,7 @@ protected void innerGet(GetRequest request, ActionListener listener } @Override - protected void innerPut(IndexRequest request, 
ActionListener listener) { + protected void innerPut(IndexRequest request, ActionListener listener) { listener.onFailure(new IndexNotFoundException(request.index())); } @@ -315,7 +325,7 @@ protected void innerGet(GetRequest request, ActionListener listener } @Override - protected void innerPut(IndexRequest request, ActionListener listener) { + protected void innerPut(IndexRequest request, ActionListener listener) { final BytesArray bytesArray = blobs.put(request.id(), new BytesArray(request.source().toBytesRef(), true)); listener.onResponse( new IndexResponse( diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInputTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInputTests.java index ec4eb8faef3dc..1495b6c5a99e2 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInputTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInputTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.blobcache.BlobCacheTestUtils; import org.elasticsearch.blobcache.shared.SharedBlobCacheService; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.FilterBlobContainer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -287,8 +288,8 @@ private static class CountingBlobContainer extends FilterBlobContainer { } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { - return new CountingInputStream(this, super.readBlob(blobName, position, length)); + public InputStream readBlob(OperationPurpose purpose, String 
blobName, long position, long length) throws IOException { + return new CountingInputStream(this, super.readBlob(purpose, blobName, position, length)); } @Override @@ -297,7 +298,7 @@ protected BlobContainer wrapChild(BlobContainer child) { } @Override - public InputStream readBlob(String name) { + public InputStream readBlob(OperationPurpose purpose, String name) { assert false : "this method should never be called"; throw new UnsupportedOperationException(); } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInputTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInputTests.java index 9fc95c137976c..63345d7447c44 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInputTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInputTests.java @@ -8,6 +8,7 @@ import org.apache.lucene.util.Version; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.lucene.store.ESIndexInputTestCase; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -33,6 +34,7 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.startsWith; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; @@ -74,10 +76,10 @@ private DirectBlobContainerIndexInput createIndexInput( ); final BlobContainer blobContainer = mock(BlobContainer.class); - 
when(blobContainer.readBlob(anyString(), anyLong(), anyLong())).thenAnswer(invocationOnMock -> { - String name = (String) invocationOnMock.getArguments()[0]; - long position = (long) invocationOnMock.getArguments()[1]; - long length = (long) invocationOnMock.getArguments()[2]; + when(blobContainer.readBlob(any(OperationPurpose.class), anyString(), anyLong(), anyLong())).thenAnswer(invocationOnMock -> { + String name = (String) invocationOnMock.getArguments()[1]; + long position = (long) invocationOnMock.getArguments()[2]; + long length = (long) invocationOnMock.getArguments()[3]; assertThat( "Reading [" + length + "] bytes from [" + name + "] at [" + position + "] exceeds part size [" + partSize + "]", position + length, diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java index 0612f2302404f..3a0fb370ac0d2 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -67,7 +66,7 @@ public void testDateMathExpressionsCanBeAuthorized() throws Exception { CreateIndexResponse response = client.admin().indices().prepareCreate(expression).get(); assertThat(response.isAcknowledged(), is(true)); } - IndexResponse response = client.prepareIndex(expression) + 
DocWriteResponse response = client.prepareIndex(expression) .setSource("foo", "bar") .setRefreshPolicy(refeshOnOperation ? IMMEDIATE : NONE) .get(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaSystemRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaSystemRoleIntegTests.java index e221bad6b2c1c..50b54e2c1fa51 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaSystemRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaSystemRoleIntegTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; @@ -46,7 +45,7 @@ public void testCreateIndexDeleteInKibanaIndex() throws Exception { assertThat(createIndexResponse.isAcknowledged(), is(true)); } - IndexResponse response = client().filterWithHeader( + DocWriteResponse response = client().filterWithHeader( singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("my_kibana_system", USERS_PASSWD)) ).prepareIndex().setIndex(index).setSource("foo", "bar").setRefreshPolicy(IMMEDIATE).get(); assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java index 7493c95f8cebd..c9f987b1981a9 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; @@ -134,7 +133,7 @@ protected String configUsersRoles() { } public void testSingleRole() throws Exception { - IndexResponse indexResponse = index("test", jsonBuilder().startObject().field("name", "value").endObject()); + DocWriteResponse indexResponse = index("test", jsonBuilder().startObject().field("name", "value").endObject()); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); indexResponse = index("test1", jsonBuilder().startObject().field("name", "value1").endObject()); @@ -185,7 +184,7 @@ public void testSingleRole() throws Exception { public void testMonitorRestrictedWildcards() throws Exception { - IndexResponse indexResponse = index("foo", jsonBuilder().startObject().field("name", "value").endObject()); + DocWriteResponse indexResponse = index("foo", jsonBuilder().startObject().field("name", "value").endObject()); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); indexResponse = index("foobar", jsonBuilder().startObject().field("name", "value").endObject()); @@ -239,7 +238,7 @@ public void testMonitorRestrictedWildcards() throws Exception { } public void testMultipleRoles() throws Exception { - IndexResponse indexResponse = index("a", jsonBuilder().startObject().field("name", "value_a").endObject()); + 
DocWriteResponse indexResponse = index("a", jsonBuilder().startObject().field("name", "value_a").endObject()); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); indexResponse = index("b", jsonBuilder().startObject().field("name", "value_b").endObject()); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/license/LicensingTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/license/LicensingTests.java index df35e705e8303..27b85888864c9 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/license/LicensingTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/license/LicensingTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsIndices; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; @@ -146,7 +145,7 @@ public void cleanupSecurityIndex() { } public void testEnableDisableBehaviour() throws Exception { - IndexResponse indexResponse = index("test", jsonBuilder().startObject().field("name", "value").endObject()); + DocWriteResponse indexResponse = index("test", jsonBuilder().startObject().field("name", "value").endObject()); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); indexResponse = index("test1", jsonBuilder().startObject().field("name", "value1").endObject()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index e05943210e8df..76029b779d8d9 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ContextPreservingActionListener; @@ -327,7 +326,7 @@ private void updateReservedUser( ) .setRefreshPolicy(refresh) .request(), - listener.delegateFailure((l, indexResponse) -> clearRealmCache(username, l, null)), + listener.delegateFailure((l, indexResponse) -> clearRealmCache(username, l, null)), client::index ); }); @@ -433,7 +432,7 @@ private void indexUser(final PutUserRequest putUserRequest, final ActionListener ) .setRefreshPolicy(putUserRequest.getRefreshPolicy()) .request(), - listener.delegateFailure( + listener.delegateFailure( (l, updateResponse) -> clearRealmCache( putUserRequest.username(), l, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidator.java index 91224a8246169..b1ee1b77998ec 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtSignatureValidator.java @@ -17,7 +17,6 @@ import com.nimbusds.jose.jwk.JWK; import com.nimbusds.jose.jwk.OctetSequenceKey; import com.nimbusds.jose.jwk.RSAKey; -import com.nimbusds.jose.util.Base64URL; import com.nimbusds.jwt.SignedJWT; import org.apache.logging.log4j.LogManager; @@ -40,9 +39,10 @@ import 
java.util.Arrays; import java.util.List; -import java.util.function.Supplier; import java.util.stream.Stream; +import static org.elasticsearch.xpack.security.authc.jwt.JwtUtil.toStringRedactSignature; + public interface JwtSignatureValidator extends Releasable { Logger logger = LogManager.getLogger(JwtSignatureValidator.class); @@ -361,7 +361,7 @@ default void validateSignature(final SignedJWT jwt, final List jwks) throws final String id = jwt.getHeader().getKeyID(); final JWSAlgorithm alg = jwt.getHeader().getAlgorithm(); - tracer.append("Filtering [{}] possible JWKs to verifying signature for JWT [{}].", jwks.size(), getSafePrintableJWT(jwt)); + tracer.append("Filtering [{}] possible JWKs to verifying signature for JWT [{}].", jwks.size(), toStringRedactSignature(jwt)); // If JWT has optional kid header, and realm JWKs have optional kid attribute, any mismatches JWT.kid vs JWK.kid can be ignored. // Keep any JWKs if JWK optional kid attribute is missing. Keep all JWKs if JWT optional kid header is missing. 
@@ -399,7 +399,11 @@ default void validateSignature(final SignedJWT jwt, final List jwks) throws int attempt = 0; int maxAttempts = jwksConfigured.size(); - tracer.append("Attempting to verify signature for JWT [{}] against [{}] possible JWKs.", getSafePrintableJWT(jwt), maxAttempts); + tracer.append( + "Attempting to verify signature for JWT [{}] against [{}] possible JWKs.", + toStringRedactSignature(jwt), + maxAttempts + ); for (final JWK jwk : jwksConfigured) { attempt++; if (jwt.verify(createJwsVerifier(jwk))) { @@ -429,7 +433,7 @@ default void validateSignature(final SignedJWT jwt, final List jwks) throws ); } } - throw new ElasticsearchException("JWT [" + getSafePrintableJWT(jwt).get() + "] signature verification failed."); + throw new ElasticsearchException("JWT [" + toStringRedactSignature(jwt).get() + "] signature verification failed."); } } @@ -458,15 +462,4 @@ interface PkcJwkSetReloadNotifier { void reloaded(); } - /** - * @param jwt The signed JWT - * @return A print safe supplier to describe a JWT that redacts the signature. While the signature is not generally sensitive, - * we don't want to leak the entire JWT to the log to avoid a possible replay. - */ - private Supplier getSafePrintableJWT(SignedJWT jwt) { - Base64URL[] parts = jwt.getParsedParts(); - assert parts.length == 3; - return () -> parts[0].toString() + "." 
+ parts[1].toString() + "."; - } - } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java index 3e3533f028b38..9168c5c0925bd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java @@ -7,9 +7,12 @@ package org.elasticsearch.xpack.security.authc.jwt; +import com.nimbusds.jose.JWSObject; import com.nimbusds.jose.jwk.JWK; import com.nimbusds.jose.jwk.JWKSet; +import com.nimbusds.jose.util.Base64URL; import com.nimbusds.jose.util.JSONObjectUtils; +import com.nimbusds.jwt.JWT; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; @@ -59,6 +62,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.function.Supplier; import javax.net.ssl.HostnameVerifier; @@ -389,4 +393,23 @@ public void close() { closed = true; } } + + /** + * @param jwt The signed JWT + * @return A print safe supplier to describe a JWT that redacts the signature. While the signature is not generally sensitive, + * we don't want to leak the entire JWT to the log to avoid a possible replay. + */ + public static Supplier toStringRedactSignature(JWT jwt) { + if (jwt instanceof JWSObject) { + Base64URL[] parts = jwt.getParsedParts(); + assert parts.length == 3; + assert parts[0] != null; + assert parts[1] != null; + assert parts[2] != null; + assert Objects.equals(parts[2], ((JWSObject) jwt).getSignature()); + return () -> parts[0] + "." 
+ parts[1] + "."; + } else { + return jwt::getParsedString; + } + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java index 73bc36c94e2d5..754d2a82dd835 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticator.java @@ -93,6 +93,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.authc.jwt.JwtUtil; import java.io.IOException; import java.net.URI; @@ -293,14 +294,18 @@ void getUserClaims( .triggerReload(ActionListener.wrap(v -> { getUserClaims(accessToken, idToken, expectedNonce, false, claimsListener); }, ex -> { - LOGGER.trace("Attempted and failed to refresh JWK cache upon token validation failure", e); + LOGGER.debug("Attempted and failed to refresh JWK cache upon token validation failure", e); claimsListener.onFailure(ex); })); } else { + LOGGER.debug("Failed to parse or validate the ID Token", e); claimsListener.onFailure(new ElasticsearchSecurityException("Failed to parse or validate the ID Token", e)); } } catch (com.nimbusds.oauth2.sdk.ParseException | ParseException | JOSEException e) { - LOGGER.debug("ID Token: [{}], Nonce: [{}]", idToken.getParsedString(), expectedNonce); + LOGGER.debug( + () -> format("ID Token: [%s], Nonce: [%s]", JwtUtil.toStringRedactSignature(idToken).get(), expectedNonce.toString()), + e + ); claimsListener.onFailure(new ElasticsearchSecurityException("Failed to parse or validate the ID Token", e)); } } diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 7fff5e646b2a4..f49558ad6875d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -9,8 +9,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.internal.Client; @@ -226,9 +226,9 @@ private void innerPutMapping(PutRoleMappingRequest request, ActionListener() { + new ActionListener() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { boolean created = indexResponse.getResult() == CREATED; listener.onResponse(created); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index c7a6c1fbd498d..d89202909da27 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -969,7 +969,7 @@ static final class AuthorizedIndices implements AuthorizationEngine.AuthorizedIn private final Predicate isAuthorizedPredicate; AuthorizedIndices(Supplier> 
allAuthorizedAndAvailableSupplier, Predicate isAuthorizedPredicate) { - this.allAuthorizedAndAvailableSupplier = new CachedSupplier<>(allAuthorizedAndAvailableSupplier); + this.allAuthorizedAndAvailableSupplier = CachedSupplier.wrap(allAuthorizedAndAvailableSupplier); this.isAuthorizedPredicate = Objects.requireNonNull(isAuthorizedPredicate); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java index 592fa16b79ff7..10f5539b953b6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.GroupedActionListener; @@ -368,9 +367,9 @@ public void putPrivileges( ActionListener>> listener ) { securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { - ActionListener groupListener = new GroupedActionListener<>( + ActionListener groupListener = new GroupedActionListener<>( privileges.size(), - ActionListener.wrap((Collection responses) -> { + ActionListener.wrap((Collection responses) -> { final Map> createdNames = responses.stream() .filter(r -> r.getResult() == DocWriteResponse.Result.CREATED) .map(r -> r.getId()) @@ -392,7 +391,7 @@ public void putPrivileges( private void innerPutPrivilege( ApplicationPrivilegeDescriptor privilege, WriteRequest.RefreshPolicy refreshPolicy, - ActionListener listener + ActionListener 
listener ) { try { final String name = privilege.getName(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 26a73c9201622..085863fdb5e31 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.MultiSearchResponse.Item; import org.elasticsearch.action.search.SearchRequest; @@ -289,9 +288,9 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest, - new ActionListener() { + new ActionListener() { @Override - public void onResponse(IndexResponse indexResponse) { + public void onResponse(DocWriteResponse indexResponse) { final boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; logger.trace("Created role: [{}]", indexRequest); clearRoleCache(roleName, listener, created); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/service/RestDeleteServiceAccountTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/service/RestDeleteServiceAccountTokenAction.java index 3343bb63d10d5..b12f1b8b3c8d1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/service/RestDeleteServiceAccountTokenAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/service/RestDeleteServiceAccountTokenAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.security.action.service.DeleteServiceAccountTokenAction; import org.elasticsearch.xpack.core.security.action.service.DeleteServiceAccountTokenRequest; -import org.elasticsearch.xpack.core.security.action.service.DeleteServiceAccountTokenResponse; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; import java.io.IOException; @@ -54,12 +53,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien return channel -> client.execute( DeleteServiceAccountTokenAction.INSTANCE, deleteServiceAccountTokenRequest, - new RestToXContentListener<>(channel) { - @Override - protected RestStatus getStatus(DeleteServiceAccountTokenResponse response) { - return response.found() ? RestStatus.OK : RestStatus.NOT_FOUND; - } - } + new RestToXContentListener<>(channel, r -> r.found() ? 
RestStatus.OK : RestStatus.NOT_FOUND) ); } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java index ac1b2f30ec06d..b38772ee2cc5e 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotState; @@ -91,15 +92,6 @@ public SnapshotRetentionTask( this.historyStore = historyStore; } - private static String formatSnapshots(Map> snapshotMap) { - return snapshotMap.entrySet() - .stream() - .map( - e -> e.getKey() + ": [" + e.getValue().stream().map(si -> si.snapshotId().getName()).collect(Collectors.joining(",")) + "]" - ) - .collect(Collectors.joining(",")); - } - @Override public void triggered(SchedulerEngine.Event event) { assert event.getJobName().equals(SnapshotRetentionService.SLM_RETENTION_JOB_ID) @@ -156,28 +148,9 @@ public void triggered(SchedulerEngine.Event event) { // Finally, asynchronously retrieve all the snapshots, deleting them serially, // before updating the cluster state with the new metrics and setting 'running' // back to false - getAllRetainableSnapshots(repositioriesToFetch, policiesWithRetention.keySet(), new ActionListener<>() { + getSnapshotsEligibleForDeletion(repositioriesToFetch, policiesWithRetention, new ActionListener<>() { @Override - public void onResponse(Map> allSnapshots) { - if (logger.isTraceEnabled()) { - logger.trace("retrieved snapshots: [{}]", formatSnapshots(allSnapshots)); - } - // Find all the snapshots that are past their 
retention date - final Map>> snapshotsToBeDeleted = allSnapshots.entrySet() - .stream() - .collect( - Collectors.toMap( - Map.Entry::getKey, - e -> e.getValue() - .stream() - .filter(snapshot -> snapshotEligibleForDeletion(snapshot, allSnapshots, policiesWithRetention)) - // SnapshotInfo instances can be quite large in case they contain e.g. a large collection of - // exceptions so we extract the only two things (id + policy id) here so they can be GCed - .map(snapshotInfo -> Tuple.tuple(snapshotInfo.snapshotId(), getPolicyId(snapshotInfo))) - .toList() - ) - ); - + public void onResponse(Map>> snapshotsToBeDeleted) { if (logger.isTraceEnabled()) { logger.trace("snapshots eligible for deletion: [{}]", snapshotsToBeDeleted); } @@ -214,7 +187,7 @@ static Map getAllPoliciesWithRetentionEnabled(f static boolean snapshotEligibleForDeletion( SnapshotInfo snapshot, - Map> allSnapshots, + Map>> allSnapshotDetails, Map policies ) { assert snapshot.userMetadata() != null @@ -236,17 +209,13 @@ static boolean snapshotEligibleForDeletion( final String repository = policy.getRepository(); // Retrieve the predicate based on the retention policy, passing in snapshots pertaining only to *this* policy and repository - boolean eligible = retention.getSnapshotDeletionPredicate( - allSnapshots.get(repository) - .stream() - .filter( - info -> Optional.ofNullable(info.userMetadata()) - .map(meta -> meta.get(POLICY_ID_METADATA_FIELD)) - .map(pId -> pId.equals(policyId)) - .orElse(false) - ) - .toList() - ).test(snapshot); + final var relevantSnapshots = allSnapshotDetails.getOrDefault(repository, Map.of()).getOrDefault(policyId, Map.of()); + assert relevantSnapshots.containsKey(snapshot.snapshotId()); + boolean eligible = retention.isSnapshotEligibleForDeletion( + snapshot.snapshotId(), + RepositoryData.SnapshotDetails.fromSnapshotInfo(snapshot), + relevantSnapshots + ); logger.debug( "[{}] testing snapshot [{}] deletion eligibility: {}", repository, @@ -256,10 +225,10 @@ static boolean 
snapshotEligibleForDeletion( return eligible; } - void getAllRetainableSnapshots( + void getSnapshotsEligibleForDeletion( Collection repositories, - Set policies, - ActionListener>> listener + Map policies, + ActionListener>>> listener ) { if (repositories.isEmpty()) { // Skip retrieving anything if there are no repositories to fetch @@ -273,7 +242,7 @@ void getAllRetainableSnapshots( // don't time out on this request to not produce failed SLM runs in case of a temporarily slow master node .setMasterNodeTimeout(TimeValue.MAX_VALUE) .setIgnoreUnavailable(true) - .setPolicies(policies.toArray(Strings.EMPTY_ARRAY)) + .setPolicies(policies.keySet().toArray(Strings.EMPTY_ARRAY)) .setIncludeIndexNames(false) .execute(ActionListener.wrap(resp -> { if (logger.isTraceEnabled()) { @@ -300,7 +269,57 @@ void getAllRetainableSnapshots( logger.debug(() -> "unable to retrieve snapshots for [" + repo + "] repositories: ", resp.getFailures().get(repo)); } } - listener.onResponse(snapshots); + + if (logger.isTraceEnabled()) { + logger.trace( + "retrieved snapshots: [{}]", + snapshots.entrySet() + .stream() + .map( + e -> e.getKey() + + ": [" + + e.getValue().stream().map(si -> si.snapshotId().getName()).collect(Collectors.joining(",")) + + "]" + ) + .collect(Collectors.joining(",")) + ); + } + + // Repository name -> Retention policy ID -> (SnapshotId, SnapshotDetails) + final Map>> allSnapshotDetails = new HashMap<>(); + // TODO should we make this properly immutable or is its scope small enough that we don't need it? 
+ for (Map.Entry> repositorySnapshots : snapshots.entrySet()) { + final var repositoryName = repositorySnapshots.getKey(); + final var repositorySnapshotDetails = allSnapshotDetails.computeIfAbsent(repositoryName, ignored -> new HashMap<>()); + for (SnapshotInfo snapshotInfo : repositorySnapshots.getValue()) { + final var snapshotId = snapshotInfo.snapshotId(); + final var snapshotDetails = RepositoryData.SnapshotDetails.fromSnapshotInfo(snapshotInfo); + final var slmPolicy = snapshotDetails.getSlmPolicy(); + if (Strings.hasText(slmPolicy)) { + final var previousDetails = repositorySnapshotDetails.computeIfAbsent(slmPolicy, ignored -> new HashMap<>()) + .put(snapshotId, snapshotDetails); + assert previousDetails == null : previousDetails; + } + } + } + + // Find all the snapshots that are past their retention date + final Map>> snapshotsToBeDeleted = snapshots.entrySet() + .stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> e.getValue() + .stream() + .filter(snapshot -> snapshotEligibleForDeletion(snapshot, allSnapshotDetails, policies)) + // SnapshotInfo instances can be quite large in case they contain e.g. 
a large collection of + // exceptions so we extract the only two things (id + policy id) here so they can be GCed + .map(snapshotInfo -> Tuple.tuple(snapshotInfo.snapshotId(), getPolicyId(snapshotInfo))) + .toList() + ) + ); + + listener.onResponse(snapshotsToBeDeleted); }, e -> { logger.debug(() -> "unable to retrieve snapshots for [" + repositories + "] repositories: ", e); listener.onFailure(e); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java index 15badabf3689a..90488c33edba2 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java @@ -26,6 +26,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; @@ -42,7 +44,6 @@ import org.elasticsearch.xpack.core.slm.SnapshotRetentionConfiguration; import org.elasticsearch.xpack.slm.history.SnapshotHistoryStore; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -133,10 +134,10 @@ public void testSnapshotEligibleForDeletion() { new SnapshotRetentionConfiguration(TimeValue.timeValueDays(30), null, null) ); Map policyMap = Collections.singletonMap("policy", policy); - Function>> mkInfos = i -> Collections.singletonMap( - repoName, - Collections.singletonList(i) - ); + Function>>> mkInfos = snapshotInfo -> { + final var snapshotDetails = RepositoryData.SnapshotDetails.fromSnapshotInfo(snapshotInfo); + return Map.of(repoName, 
Map.of(snapshotDetails.getSlmPolicy(), Map.of(snapshotInfo.snapshotId(), snapshotDetails))); + }; // Test with an ancient snapshot that should be expunged SnapshotInfo info = new SnapshotInfo( @@ -238,20 +239,6 @@ private void retentionTaskTest(final boolean deletionSuccess) throws Exception { 0L, Collections.emptyMap() ); - final SnapshotInfo ineligibleSnapshot = new SnapshotInfo( - new Snapshot(repoId, new SnapshotId("name2", "uuid2")), - Collections.singletonList("index"), - Collections.emptyList(), - Collections.emptyList(), - null, - System.currentTimeMillis() + 1, - 1, - Collections.emptyList(), - true, - Collections.singletonMap("policy", policyId), - System.currentTimeMillis(), - Collections.emptyMap() - ); Set deleted = ConcurrentHashMap.newKeySet(); Set deletedSnapshotsInHistory = ConcurrentHashMap.newKeySet(); @@ -273,11 +260,9 @@ private void retentionTaskTest(final boolean deletionSuccess) throws Exception { historyLatch.countDown(); }), () -> { - List snaps = new ArrayList<>(2); - snaps.add(eligibleSnapshot); - snaps.add(ineligibleSnapshot); - logger.info("--> retrieving snapshots [{}]", snaps); - return Collections.singletonMap(repoId, snaps); + final var result = Collections.singletonMap(repoId, List.of(Tuple.tuple(eligibleSnapshot.snapshotId(), policyId))); + logger.info("--> retrieving snapshots [{}]", result); + return result; }, (deletionPolicyId, repo, snapId, slmStats, listener) -> { logger.info("--> deleting {} from repo {}", snapId, repo); @@ -295,7 +280,7 @@ private void retentionTaskTest(final boolean deletionSuccess) throws Exception { long time = System.currentTimeMillis(); retentionTask.triggered(new SchedulerEngine.Event(SnapshotRetentionService.SLM_RETENTION_JOB_ID, time, time)); - deletionLatch.await(10, TimeUnit.SECONDS); + safeAwait(deletionLatch); assertThat("something should have been deleted", deleted, not(empty())); assertThat("one snapshot should have been deleted", deleted, hasSize(1)); @@ -364,18 +349,22 @@ protected void ); 
AtomicReference errHandlerCalled = new AtomicReference<>(null); - task.getAllRetainableSnapshots(Collections.singleton(repoId), Collections.singleton(policyId), new ActionListener<>() { - @Override - public void onResponse(Map> stringListMap) { - logger.info("--> forcing failure"); - throw new ElasticsearchException("forced failure"); - } + task.getSnapshotsEligibleForDeletion( + Collections.singleton(repoId), + Map.of(policyId, new SnapshotLifecyclePolicy(policyId, "test", "* * * * *", repoId, null, null)), + new ActionListener<>() { + @Override + public void onResponse(Map>> snapshotsToBeDeleted) { + logger.info("--> forcing failure"); + throw new ElasticsearchException("forced failure"); + } - @Override - public void onFailure(Exception e) { - errHandlerCalled.set(e); + @Override + public void onFailure(Exception e) { + errHandlerCalled.set(e); + } } - }); + ); assertNotNull(errHandlerCalled.get()); assertThat(errHandlerCalled.get().getMessage(), equalTo("forced failure")); @@ -597,14 +586,14 @@ public ClusterState createState(OperationMode mode, SnapshotLifecyclePolicy... 
p } private static class MockSnapshotRetentionTask extends SnapshotRetentionTask { - private final Supplier>> snapshotRetriever; + private final Supplier>>> snapshotRetriever; private final DeleteSnapshotMock deleteRunner; MockSnapshotRetentionTask( Client client, ClusterService clusterService, SnapshotHistoryStore historyStore, - Supplier>> snapshotRetriever, + Supplier>>> snapshotRetriever, DeleteSnapshotMock deleteRunner, LongSupplier nanoSupplier ) { @@ -614,10 +603,10 @@ private static class MockSnapshotRetentionTask extends SnapshotRetentionTask { } @Override - void getAllRetainableSnapshots( + void getSnapshotsEligibleForDeletion( Collection repositories, - Set policies, - ActionListener>> listener + Map policies, + ActionListener>>> listener ) { listener.onResponse(this.snapshotRetriever.get()); } diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index 6c3708667b769..e0ea9d4ff076c 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.FilterBlobContainer; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; @@ -182,13 +183,13 @@ protected BlobContainer 
wrapChild(BlobContainer child) { } @Override - public InputStream readBlob(String blobName) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { // Take into account only index files if (blobName.startsWith("__") == false) { - return super.readBlob(blobName); + return super.readBlob(purpose, blobName); } - return new FilterInputStream(super.readBlob(blobName)) { + return new FilterInputStream(super.readBlob(purpose, blobName)) { @Override public int read(byte[] b, int off, int len) throws IOException { int read = super.read(b, off, len); @@ -224,13 +225,13 @@ protected BlobContainer wrapChild(BlobContainer child) { } @Override - public InputStream readBlob(String blobName) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { // Fail only in index files if (blobName.startsWith("__") == false) { - return super.readBlob(blobName); + return super.readBlob(purpose, blobName); } - return new FilterInputStream(super.readBlob(blobName)) { + return new FilterInputStream(super.readBlob(purpose, blobName)) { @Override public int read(byte[] b, int off, int len) throws IOException { if (randomBoolean()) { @@ -287,9 +288,9 @@ protected BlobContainer wrapChild(BlobContainer child) { } @Override - public InputStream readBlob(String blobName) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { BiFunction delegateSupplier = delegateSupplierRef.get(); - return delegateSupplier.apply(blobName, super.readBlob(blobName)); + return delegateSupplier.apply(blobName, super.readBlob(purpose, blobName)); } }; } diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java 
b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java index a5ce2b49d9c2a..ba132938b238e 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisFailureIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesReference; @@ -35,7 +36,6 @@ import org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; @@ -97,8 +97,7 @@ public void testSuccess() { request.blobCount(1); request.maxBlobSize(ByteSizeValue.ofBytes(10L)); - final RepositoryAnalyzeAction.Response response = analyseRepository(request); - assertThat(response.status(), equalTo(RestStatus.OK)); + analyseRepository(request); } public void testFailsOnReadError() { @@ -421,7 +420,7 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) {} + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) {} private void 
deleteContainer(DisruptableBlobContainer container) { blobContainer = null; @@ -482,12 +481,12 @@ public BlobPath path() { } @Override - public boolean blobExists(String blobName) { + public boolean blobExists(OperationPurpose purpose, String blobName) { return blobs.containsKey(blobName); } @Override - public InputStream readBlob(String blobName) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { final byte[] actualContents = blobs.get(blobName); final byte[] disruptedContents = disruption.onRead(actualContents, 0L, actualContents == null ? 0L : actualContents.length); if (disruptedContents == null) { @@ -497,7 +496,7 @@ public InputStream readBlob(String blobName) throws IOException { } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { final byte[] actualContents = blobs.get(blobName); final byte[] disruptedContents = disruption.onRead(actualContents, position, length); if (disruptedContents == null) { @@ -508,17 +507,25 @@ public InputStream readBlob(String blobName, long position, long length) throws } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + public void writeBlob( + OperationPurpose purpose, + String blobName, + InputStream inputStream, + long blobSize, + boolean failIfAlreadyExists + ) throws IOException { writeBlobAtomic(blobName, inputStream, failIfAlreadyExists); } @Override - public void writeBlob(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { - writeBlob(blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); + public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + 
writeBlob(OperationPurpose.SNAPSHOT, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); } @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -527,14 +534,15 @@ public void writeMetadataBlob( final BytesStreamOutput out = new BytesStreamOutput(); writer.accept(out); if (atomic) { - writeBlobAtomic(blobName, out.bytes(), failIfAlreadyExists); + writeBlobAtomic(OperationPurpose.SNAPSHOT, blobName, out.bytes(), failIfAlreadyExists); } else { - writeBlob(blobName, out.bytes(), failIfAlreadyExists); + writeBlob(OperationPurpose.SNAPSHOT, blobName, out.bytes(), failIfAlreadyExists); } } @Override - public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { final StreamInput inputStream; try { inputStream = bytes.streamInput(); @@ -566,7 +574,7 @@ private void writeBlobAtomic(String blobName, InputStream inputStream, boolean f } @Override - public DeleteResult delete() throws IOException { + public DeleteResult delete(OperationPurpose purpose) throws IOException { disruption.onDelete(); deleteContainer.accept(this); final DeleteResult deleteResult = new DeleteResult(blobs.size(), blobs.values().stream().mapToLong(b -> b.length).sum()); @@ -575,12 +583,12 @@ public DeleteResult delete() throws IOException { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) { blobNames.forEachRemaining(blobs.keySet()::remove); } @Override - public Map listBlobs() throws IOException { + public Map listBlobs(OperationPurpose purpose) throws IOException { return disruption.onList( blobs.entrySet() .stream() @@ -589,19 +597,20 @@ public Map listBlobs() throws IOException { } 
@Override - public Map children() { + public Map children(OperationPurpose purpose) { return Map.of(); } @Override - public Map listBlobsByPrefix(String blobNamePrefix) throws IOException { - final Map blobMetadataByName = listBlobs(); + public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) throws IOException { + final Map blobMetadataByName = listBlobs(OperationPurpose.SNAPSHOT); blobMetadataByName.keySet().removeIf(s -> s.startsWith(blobNamePrefix) == false); return blobMetadataByName; } @Override public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java index b29940964e942..45b1bdc756789 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.DeleteResult; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; @@ -34,7 +35,6 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; @@ -124,10 +124,8 @@ public void testRepositoryAnalysis() { request.timeout(TimeValue.timeValueSeconds(20)); - final RepositoryAnalyzeAction.Response response = client().execute(RepositoryAnalyzeAction.INSTANCE, request) - .actionGet(30L, TimeUnit.SECONDS); + client().execute(RepositoryAnalyzeAction.INSTANCE, request).actionGet(30L, TimeUnit.SECONDS); - assertThat(response.status(), equalTo(RestStatus.OK)); assertThat(blobStore.currentPath, nullValue()); } @@ -241,7 +239,7 @@ private void deleteContainer(AssertingBlobContainer container) { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) {} + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) {} @Override public void close() {} @@ -300,12 +298,12 @@ public BlobPath path() { } @Override - public boolean blobExists(String blobName) { + public boolean blobExists(OperationPurpose purpose, String blobName) { return blobs.containsKey(blobName); } @Override - public InputStream readBlob(String blobName) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { final byte[] contents = blobs.get(blobName); if (contents == null) { throw new FileNotFoundException(blobName + " not found"); @@ -314,7 +312,7 @@ public InputStream readBlob(String blobName) throws IOException { } @Override - public InputStream readBlob(String blobName, long position, long length) throws IOException { + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { final byte[] contents = blobs.get(blobName); if (contents == null) { throw new FileNotFoundException(blobName + " not found"); @@ -324,7 +322,13 @@ public InputStream readBlob(String blobName, long position, long length) throws } 
@Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + public void writeBlob( + OperationPurpose purpose, + String blobName, + InputStream inputStream, + long blobSize, + boolean failIfAlreadyExists + ) throws IOException { assertTrue("must only write blob [" + blobName + "] non-atomically if it doesn't already exist", failIfAlreadyExists); assertNull("blob [" + blobName + "] must not exist", blobs.get(blobName)); @@ -333,12 +337,14 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } @Override - public void writeBlob(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { - writeBlob(blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); + public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + writeBlob(OperationPurpose.SNAPSHOT, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); } @Override public void writeMetadataBlob( + OperationPurpose purpose, String blobName, boolean failIfAlreadyExists, boolean atomic, @@ -347,14 +353,15 @@ public void writeMetadataBlob( final BytesStreamOutput out = new BytesStreamOutput(); writer.accept(out); if (atomic) { - writeBlobAtomic(blobName, out.bytes(), failIfAlreadyExists); + writeBlobAtomic(OperationPurpose.SNAPSHOT, blobName, out.bytes(), failIfAlreadyExists); } else { - writeBlob(blobName, out.bytes(), failIfAlreadyExists); + writeBlob(OperationPurpose.SNAPSHOT, blobName, out.bytes(), failIfAlreadyExists); } } @Override - public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { writeBlobAtomic(blobName, bytes.streamInput(), bytes.length(), 
failIfAlreadyExists); } @@ -383,7 +390,7 @@ private void writeBlobAtomic(String blobName, InputStream inputStream, long blob } @Override - public DeleteResult delete() { + public DeleteResult delete(OperationPurpose purpose) { deleteContainer.accept(this); final DeleteResult deleteResult = new DeleteResult(blobs.size(), blobs.values().stream().mapToLong(b -> b.length).sum()); blobs.clear(); @@ -391,31 +398,31 @@ public DeleteResult delete() { } @Override - public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) { + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) { blobNames.forEachRemaining(blobs.keySet()::remove); } @Override - public Map listBlobs() { + public Map listBlobs(OperationPurpose purpose) { return blobs.entrySet() .stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> new BlobMetadata(e.getKey(), e.getValue().length))); } @Override - public Map children() { + public Map children(OperationPurpose purpose) { return Map.of(); } @Override - public Map listBlobsByPrefix(String blobNamePrefix) { - final Map blobMetadataByName = listBlobs(); + public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) { + final Map blobMetadataByName = listBlobs(OperationPurpose.SNAPSHOT); blobMetadataByName.keySet().removeIf(s -> s.startsWith(blobNamePrefix) == false); return blobMetadataByName; } @Override - public void getRegister(String key, ActionListener listener) { + public void getRegister(OperationPurpose purpose, String key, ActionListener listener) { if (firstRegisterRead.compareAndSet(true, false) && randomBoolean() && randomBoolean()) { // only fail the first read, we must not fail the final check listener.onResponse(OptionalBytesReference.EMPTY); @@ -423,12 +430,13 @@ public void getRegister(String key, ActionListener liste listener.onResponse(OptionalBytesReference.of(registers.computeIfAbsent(key, ignored -> new BytesRegister()).get())); } else { final var bogus = 
randomFrom(BytesArray.EMPTY, new BytesArray(new byte[] { randomByte() })); - compareAndExchangeRegister(key, bogus, bogus, listener); + compareAndExchangeRegister(OperationPurpose.SNAPSHOT, key, bogus, bogus, listener); } } @Override public void compareAndExchangeRegister( + OperationPurpose purpose, String key, BytesReference expected, BytesReference updated, diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/BlobAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/BlobAnalyzeAction.java index 5c9e0fc0b9202..72adf752737fc 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/BlobAnalyzeAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/BlobAnalyzeAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.inject.Inject; @@ -347,17 +348,18 @@ public StreamInput streamInput() throws IOException { }; if (atomic) { try { - blobContainer.writeBlobAtomic(request.blobName, bytesReference, failIfExists); + blobContainer.writeBlobAtomic(OperationPurpose.SNAPSHOT, request.blobName, bytesReference, failIfExists); } catch (BlobWriteAbortedException e) { assert request.getAbortWrite() : "write unexpectedly aborted"; } } else { - blobContainer.writeBlob(request.blobName, bytesReference, failIfExists); + blobContainer.writeBlob(OperationPurpose.SNAPSHOT, request.blobName, bytesReference, failIfExists); } } else { cancellableThreads.execute(() -> { try { blobContainer.writeBlob( + 
OperationPurpose.SNAPSHOT, request.blobName, repository.maybeRateLimitSnapshots( new RandomBlobContentStream(content, request.getTargetLength()), @@ -476,7 +478,7 @@ private void cleanUpAndReturnFailure(Exception exception) { logger.trace(() -> "analysis failed [" + request.getDescription() + "] cleaning up", exception); } try { - blobContainer.deleteBlobsIgnoringIfNotExists(Iterators.single(request.blobName)); + blobContainer.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, Iterators.single(request.blobName)); } catch (IOException ioException) { exception.addSuppressed(ioException); logger.warn( diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/GetBlobChecksumAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/GetBlobChecksumAction.java index bb38c3a0c8b7b..c27271e28130b 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/GetBlobChecksumAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/GetBlobChecksumAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -97,9 +98,14 @@ protected void doExecute(Task task, Request request, ActionListener li final InputStream rawInputStream; try { if (request.isWholeBlob()) { - rawInputStream = blobContainer.readBlob(request.getBlobName()); + rawInputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, request.getBlobName()); } else { - rawInputStream = blobContainer.readBlob(request.getBlobName(), 
request.getRangeStart(), request.getRangeLength()); + rawInputStream = blobContainer.readBlob( + OperationPurpose.SNAPSHOT, + request.getBlobName(), + request.getRangeStart(), + request.getRangeLength() + ); } } catch (FileNotFoundException | NoSuchFileException e) { logger.trace("blob not found for [{}]", request); diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RegisterAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RegisterAnalyzeAction.java index 985a241f05b14..641d18c4204b8 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RegisterAnalyzeAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RegisterAnalyzeAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -120,6 +121,7 @@ class Execution extends ActionRunnable { protected void doRun() { if (((CancellableTask) task).notifyIfCancelled(listener) == false) { blobContainer.compareAndExchangeRegister( + OperationPurpose.SNAPSHOT, registerName, bytesFromLong(currentValue), bytesFromLong(currentValue + 1L), @@ -167,9 +169,10 @@ public void onFailure(Exception e) { }; if (request.getInitialRead() > request.getRequestCount()) { - blobContainer.getRegister(registerName, initialValueListener); + blobContainer.getRegister(OperationPurpose.SNAPSHOT, registerName, initialValueListener); } else { blobContainer.compareAndExchangeRegister( + OperationPurpose.SNAPSHOT, registerName, 
bytesFromLong(request.getInitialRead()), bytesFromLong( diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java index 5cd74f0869802..d8e3c82433704 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; @@ -39,7 +40,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThrottledIterator; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; @@ -47,7 +47,6 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; @@ -57,6 +56,7 @@ import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponseHandler; import 
org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -630,14 +630,16 @@ public void onFailure(Exception exp) { } }, ref), listener -> { switch (random.nextInt(3)) { - case 0 -> getBlobContainer().getRegister(registerName, listener); + case 0 -> getBlobContainer().getRegister(OperationPurpose.SNAPSHOT, registerName, listener); case 1 -> getBlobContainer().compareAndExchangeRegister( + OperationPurpose.SNAPSHOT, registerName, RegisterAnalyzeAction.bytesFromLong(expectedFinalRegisterValue), new BytesArray(new byte[] { (byte) 0xff }), listener ); case 2 -> getBlobContainer().compareAndSetRegister( + OperationPurpose.SNAPSHOT, registerName, RegisterAnalyzeAction.bytesFromLong(expectedFinalRegisterValue), new BytesArray(new byte[] { (byte) 0xff }), @@ -687,7 +689,7 @@ private void ensureConsistentListing() { try { final BlobContainer blobContainer = getBlobContainer(); final Set missingBlobs = new HashSet<>(expectedBlobs); - final Map blobsMap = blobContainer.listBlobs(); + final Map blobsMap = blobContainer.listBlobs(OperationPurpose.SNAPSHOT); missingBlobs.removeAll(blobsMap.keySet()); if (missingBlobs.isEmpty()) { @@ -710,11 +712,11 @@ private void ensureConsistentListing() { private void deleteContainer() { try { final BlobContainer blobContainer = getBlobContainer(); - blobContainer.delete(); + blobContainer.delete(OperationPurpose.SNAPSHOT); if (failure.get() != null) { return; } - final Map blobsMap = blobContainer.listBlobs(); + final Map blobsMap = blobContainer.listBlobs(OperationPurpose.SNAPSHOT); if (blobsMap.isEmpty() == false) { final RepositoryVerificationException repositoryVerificationException = new RepositoryVerificationException( request.repositoryName, @@ -1013,7 +1015,7 @@ public void reseed(long newSeed) { } - public static class Response extends ActionResponse implements StatusToXContentObject { + public static class 
Response extends ActionResponse implements ToXContentObject { private final String coordinatingNodeId; private final String coordinatingNodeName; @@ -1108,11 +1110,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(deleteTimeNanos); } - @Override - public RestStatus status() { - return RestStatus.OK; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RestRepositoryAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RestRepositoryAnalyzeAction.java index b1e3e36a8d922..8dce1d4600278 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RestRepositoryAnalyzeAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RestRepositoryAnalyzeAction.java @@ -14,7 +14,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.XContentBuilder; import java.util.List; @@ -62,7 +62,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> cancelClient.execute( RepositoryAnalyzeAction.INSTANCE, analyzeRepositoryRequest, - new RestStatusToXContentListener<>(channel) { + new RestToXContentListener<>(channel) { @Override public RestResponse buildResponse(RepositoryAnalyzeAction.Response response, XContentBuilder builder) throws Exception { builder.humanReadable(request.paramAsBoolean("human", true)); diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec 
b/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec index 1d32e4724195f..956378aec12d0 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec @@ -130,8 +130,7 @@ SELECT COUNT(*), TRUNCATE(emp_no, -2) t FROM test_emp WHERE 'aaabbb' RLIKE 'a{2, 1 |10100 ; -// AwaitsFix https://github.com/elastic/elasticsearch/issues/96805 -inWithCompatibleDateTypes-Ignore +inWithCompatibleDateTypes1 SELECT birth_date FROM test_emp WHERE birth_date IN ({d '1959-07-23'}, CAST('1959-12-25T00:00:00' AS TIMESTAMP), '1964-06-02T00:00:00.000Z') OR birth_date IS NULL ORDER BY birth_date; birth_date:ts @@ -178,4 +177,4 @@ SELECT COUNT(*) AS c, LEFT(CONCAT(first_name, last_name), 1) AS first_letter FRO 1 |X 3 |Y 2 |Z -; \ No newline at end of file +; diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponse.java index bd5abb5a993e9..10db7228807d4 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponse.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponse.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -22,7 +22,7 @@ /** * Response to the request to clean all SQL resources associated with the cursor */ -public class SqlClearCursorResponse extends ActionResponse implements StatusToXContentObject { +public class SqlClearCursorResponse extends 
ActionResponse implements ToXContentObject { private boolean succeeded; @@ -47,7 +47,6 @@ public SqlClearCursorResponse setSucceeded(boolean succeeded) { return this; } - @Override public RestStatus status() { return succeeded ? NOT_FOUND : OK; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlAsyncGetStatusAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlAsyncGetStatusAction.java index 5be7336d2cfad..4ce9655872cb2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlAsyncGetStatusAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlAsyncGetStatusAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.async.GetAsyncStatusRequest; import java.util.List; @@ -35,6 +35,6 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { GetAsyncStatusRequest statusRequest = new GetAsyncStatusRequest(request.param(ID_NAME)); - return channel -> client.execute(SqlAsyncGetStatusAction.INSTANCE, statusRequest, new RestStatusToXContentListener<>(channel)); + return channel -> client.execute(SqlAsyncGetStatusAction.INSTANCE, statusRequest, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/analytics/histogram.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/analytics/histogram.yml index 7c1d99458291f..b2f710e5ffed8 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/analytics/histogram.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/analytics/histogram.yml @@ -251,3 +251,58 @@ 
histogram with synthetic source and zero counts: latency: values: [0.2, 0.4] counts: [7, 6] + + +--- +histogram with large count values: + - skip: + version: " - 8.10.99" + reason: Support for `long` values was introduced in 8.11.0 + + - do: + indices.create: + index: histo_large_count + body: + mappings: + properties: + latency: + type: histogram + - do: + bulk: + index: histo_large_count + refresh: true + body: + - '{"index": {}}' + - '{"latency": {"values" : [0.1, 0.2, 0.3, 0.4, 0.5], "counts" : [0, 1000000000000, 10, 1000, 1000000]}}' + + - do: + search: + index: histo_large_count + body: + size: 0 + aggs: + histo: + histogram: + field: latency + interval: 0.3 + + - length: { aggregations.histo.buckets: 2 } + - match: { aggregations.histo.buckets.0.key: 0.0 } + - match: { aggregations.histo.buckets.0.doc_count: 1000000000000 } + - match: { aggregations.histo.buckets.1.key: 0.3 } + - match: { aggregations.histo.buckets.1.doc_count: 1001010 } + + - do: + search: + index: histo_large_count + body: + size: 0 + aggs: + percent: + percentiles: + field: latency + + - length: { aggregations.percent.values: 7 } + - match: { aggregations.percent.values.1\.0: 0.2 } + - match: { aggregations.percent.values.5\.0: 0.2 } + - match: { aggregations.percent.values.25\.0: 0.2 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml index 417c52e391b7d..b38c6857108cc 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml @@ -1214,3 +1214,30 @@ setup: ml.get_trained_models: model_id: a-regression-model-0 include: definition_status + +--- +"Test include model platform architecture": + - do: + ml.put_trained_model: + model_id: model-without-definition + body: > + { + "model_type": "pytorch", + "inference_config": { + "ner": { + } + }, + 
"platform_architecture": "windows-x86_64" + } + + - do: + ml.get_trained_models: + model_id: model-without-definition + include: definition_status + - match: { count: 1 } + - match: { trained_model_configs.0.fully_defined: false } + - do: + ml.get_trained_models: + model_id: model-without-definition + - match: { count: 1 } + - match: { trained_model_configs.0.platform_architecture: windows-x86_64 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml new file mode 100644 index 0000000000000..28a6ad826bc64 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml @@ -0,0 +1,111 @@ +# This test uses the simple model defined in +# TextExpansionQueryIT.java to create the token weights. +setup: + - skip: + version: ' - 8.10.99' + reason: "sparse_vector field type reintroduced in 8.11" + features: headers + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + indices.create: + index: index-with-rank-features + body: + mappings: + properties: + source_text: + type: keyword + ml.tokens: + type: rank_features + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + indices.create: + index: unrelated + body: + mappings: + properties: + source_text: + type: keyword + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + ml.put_trained_model: + model_id: "text_expansion_model" + body: > + { + "description": "simple model for testing", + "model_type": "pytorch", + "inference_config": { + "text_expansion": { + "tokenization": { + "bert": { + "with_special_tokens": false + } + } + } + } + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + ml.put_trained_model_vocabulary: + model_id: "text_expansion_model" + body: > + { "vocabulary": ["[PAD]", "[UNK]", "these", "are", "my", "words", "the", "washing", "machine", "is", "leaking", "octopus", "comforter", "smells"] } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + ml.put_trained_model_definition_part: + model_id: "text_expansion_model" + part: 0 + body: > + { + "total_definition_length":2078, + "definition": "UEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAAUAA4Ac2ltcGxlbW9kZWwvZGF0YS5wa2xGQgoAWlpaWlpaWlpaWoACY19fdG9yY2hfXwpUaW55VGV4dEV4cGFuc2lvbgpxACmBfShYCAAAAHRyYWluaW5ncQGJWBYAAABfaXNfZnVsbF9iYWNrd2FyZF9ob29rcQJOdWJxAy5QSwcIITmbsFgAAABYAAAAUEsDBBQACAgIAAAAAAAAAAAAAAAAAAAAAAAdAB0Ac2ltcGxlbW9kZWwvY29kZS9fX3RvcmNoX18ucHlGQhkAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWoWRT4+cMAzF7/spfASJomF3e0Ga3nrrn8vcELIyxAzRhAQlpjvbT19DWDrdquqBA/bvPT87nVUxwsm41xPd+PNtUi4a77KvXs+W8voBAHFSQY3EFCIiHKFp1+p57vs/ShyUccZdoIaz93aBTMR+thbPqru+qKBx8P4q/e8TyxRlmwVctJp66H1YmCyS7WsZwD50A2L5V7pCBADGTTOj0bGGE7noQyqzv5JDfp0o9fZRCWqP37yjhE4+mqX5X3AdFZHGM/2TzOHDpy1IvQWR+OWo3KwsRiKdpcqg4pBFDtm+QJ7nqwIPckrlnGfFJG0uNhOl38Sjut3pCqg26QuZy8BR9In7ScHHrKkKMW0TIucFrGQXCMpdaDO05O6DpOiy8e4kr0Ed/2YKOIhplW8gPr4ntygrd9ixpx3j9UZZVRagl2c6+imWUzBjuf5m+Ch7afphuvvW+r/0dsfn+2N9MZGb9+/SFtCYdhd83CMYp+mGy0LiKNs8y/eUuEA8B/d2z4dfUEsHCFSE3IaCAQAAIAMAAFBLAwQUAAgICAAAAAAAAAAAAAAAAAAAAAAAJwApAHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5LmRlYnVnX3BrbEZCJQBaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpahZHLbtNAFIZtp03rSVIuLRKXjdk5ojitKJsiFq24lem0KKSqpRIZt55gE9/GM+lNLFgx4i1Ys2aHhIBXgAVICNggHgNm6rqJN2BZGv36/v/MOWeea/Z5RVHurLfRUsfZXOnccx522itrd53O0vLqbaKYtsAKUe1pcege7hm9JNtzM8+kOOzNApIX0A3xBXE6YE7g0UWjg2OaZAJXbKvALOnj2GEHKc496ykLktgNt3Jz17hprCUxFqExe7YIpQkNpO1/kfHhPUdtUAdH2/gfmeYiIFW7IkM6IBP2wrDNbMe3Mjf2ksiK3Hjghg7F2DN9l/omZZl5Mmez2QRk0q4WUUB0+1oh9nDwxGdUXJdXPMRZQs352eGaRPV9s2lcMeZFGWBfKJJiw0YgbCMLBaRmXyy4flx6a667Fch55q05QOq2Jg2ANOyZwplhNsjiohVApo7aa21QnNGW5+4GXv8gxK1beBeHSRrhmLXWVh+0aBhErZ7bx1ejxMOhlR6QU4ycNqGyk8/yNGCWkwY7/RCD7UEQek4QszCgDJAzZtfErA0VqHBy9ugQP9pUfUmgCjVYgWNwHFbhBJyEOgSwBuuwARWZmoI6J9PwLfzEocpRpPrT8DP8wqHG0b4UX+E3DiscvRglXIoi81KKPwioHI5x9EooNKWiy0KOc/T6WF4SssrRuzJ9L2VNRXUhJzj6UKYfS4W/q/5wuh/l4M9R9qsU+y2dpoo2hJzkaEET8r6KRONicnRdK9EbUi6raFVIwNGjsrlbpk6ZPi7TbS3fv3LyNjPiEKzG0aG0tvNb6xw90/whe6ONjnJcUxobHDUqQ8bIOW79BVBLBwhfSmPKdAIAAE4EAABQSwMEAAAICAAAAAAAAAAAAAAAAAAAAAAAABkABQBzaW1wbGVtb2RlbC9jb25zdGFudHMucGtsRkIBAFqAAikuUEsHCG0vCVcEAAAABAAAAFBLAwQAAAgIAAAAAAAAAAAAAAAAAAAAAAAAEwA7AHNpbXBsZW1vZGVsL3ZlcnNpb25GQjcAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWjMKUEsHCNGeZ1UCAAAAAgAAAFBLAQIAAAAACAgAAAAAAAAhOZuwWAAAAFgAAAAUAAAAAAAAAAAAAAAAAAAAAABzaW1wbGVtb2RlbC9kYXRhLnBrbFBLAQIAABQACAgIAAAAAABUhNyGggEAACADAAAdAAAAAAAAAAAAAAAAAKgAAABzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weVBLAQIAABQACAgIAAAAAABfSmPKdAIAAE4EAAAnAAAAAAAAAAAAAAAAAJICAABzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weS5kZWJ1Z19wa2xQSwECAAAAAAgIAAAAAAAAbS8JVwQAAAAEAAAAGQAAAAAAAAAAAAAAAACEBQAAc2ltcGxlbW9kZWwvY29uc3RhbnRzLnBrbFBLAQIAAAAACAgAAAAAAADRnmdVAgAAAAIAAAATAAAAAAAAAAAAAAAAANQFAABzaW1wbGVtb2RlbC92ZXJzaW9uUEsGBiwAAAAAAAAAHgMtAAAAAAAAAAAABQAAAAAAAAAFAAAAAAAAAGoBAAAAAAAAUgYAAAAAAABQSwYHAAAAALwHAAAAAAAAAQAAAFBLBQYAAAAABQAFAGoBAABSBgAAAAA=", + "total_parts": 1 + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json + bulk: + index: index-with-rank-features + refresh: true + body: | + {"index": {}} + {"source_text": "my words comforter", "ml.tokens":{"my":1.0, "words":1.0,"comforter":1.0}} + {"index": {}} + {"source_text": "the machine is leaking", "ml.tokens":{"the":1.0,"machine":1.0,"is":1.0,"leaking":1.0}} + {"index": {}} + {"source_text": "these are my words", "ml.tokens":{"these":1.0,"are":1.0,"my":1.0,"words":1.0}} + {"index": {}} + {"source_text": "the octopus comforter smells", "ml.tokens":{"the":1.0,"octopus":1.0,"comforter":1.0,"smells":1.0}} + {"index": {}} + {"source_text": "the octopus comforter is leaking", "ml.tokens":{"the":1.0,"octopus":1.0,"comforter":1.0,"is":1.0,"leaking":1.0}} + {"index": {}} + {"source_text": "washing machine smells", "ml.tokens":{"washing":1.0,"machine":1.0,"smells":1.0}} + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json + ml.start_trained_model_deployment: + model_id: text_expansion_model + wait_for: started + +--- +"Test text expansion search": + - do: + search: + index: index-with-rank-features + body: + query: + text_expansion: + ml.tokens: + model_id: text_expansion_model + model_text: "octopus comforter smells" + - match: { hits.total.value: 4 } + - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml new file mode 100644 index 0000000000000..5a31af18f8269 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml @@ -0,0 +1,111 @@ +# This test uses the simple model defined in +# TextExpansionQueryIT.java to create the token weights. 
+setup: + - skip: + features: headers + version: ' - 8.7.99' + reason: "text_expansion query introduced in 8.8" + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + indices.create: + index: index-with-rank-features + body: + mappings: + properties: + source_text: + type: keyword + ml.tokens: + type: sparse_vector + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + indices.create: + index: unrelated + body: + mappings: + properties: + source_text: + type: keyword + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + ml.put_trained_model: + model_id: "text_expansion_model" + body: > + { + "description": "simple model for testing", + "model_type": "pytorch", + "inference_config": { + "text_expansion": { + "tokenization": { + "bert": { + "with_special_tokens": false + } + } + } + } + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + ml.put_trained_model_vocabulary: + model_id: "text_expansion_model" + body: > + { "vocabulary": ["[PAD]", "[UNK]", "these", "are", "my", "words", "the", "washing", "machine", "is", "leaking", "octopus", "comforter", "smells"] } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + ml.put_trained_model_definition_part: + model_id: "text_expansion_model" + part: 0 + body: > + { + "total_definition_length":2078, + "definition": "UEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAAUAA4Ac2ltcGxlbW9kZWwvZGF0YS5wa2xGQgoAWlpaWlpaWlpaWoACY19fdG9yY2hfXwpUaW55VGV4dEV4cGFuc2lvbgpxACmBfShYCAAAAHRyYWluaW5ncQGJWBYAAABfaXNfZnVsbF9iYWNrd2FyZF9ob29rcQJOdWJxAy5QSwcIITmbsFgAAABYAAAAUEsDBBQACAgIAAAAAAAAAAAAAAAAAAAAAAAdAB0Ac2ltcGxlbW9kZWwvY29kZS9fX3RvcmNoX18ucHlGQhkAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWoWRT4+cMAzF7/spfASJomF3e0Ga3nrrn8vcELIyxAzRhAQlpjvbT19DWDrdquqBA/bvPT87nVUxwsm41xPd+PNtUi4a77KvXs+W8voBAHFSQY3EFCIiHKFp1+p57vs/ShyUccZdoIaz93aBTMR+thbPqru+qKBx8P4q/e8TyxRlmwVctJp66H1YmCyS7WsZwD50A2L5V7pCBADGTTOj0bGGE7noQyqzv5JDfp0o9fZRCWqP37yjhE4+mqX5X3AdFZHGM/2TzOHDpy1IvQWR+OWo3KwsRiKdpcqg4pBFDtm+QJ7nqwIPckrlnGfFJG0uNhOl38Sjut3pCqg26QuZy8BR9In7ScHHrKkKMW0TIucFrGQXCMpdaDO05O6DpOiy8e4kr0Ed/2YKOIhplW8gPr4ntygrd9ixpx3j9UZZVRagl2c6+imWUzBjuf5m+Ch7afphuvvW+r/0dsfn+2N9MZGb9+/SFtCYdhd83CMYp+mGy0LiKNs8y/eUuEA8B/d2z4dfUEsHCFSE3IaCAQAAIAMAAFBLAwQUAAgICAAAAAAAAAAAAAAAAAAAAAAAJwApAHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5LmRlYnVnX3BrbEZCJQBaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpahZHLbtNAFIZtp03rSVIuLRKXjdk5ojitKJsiFq24lem0KKSqpRIZt55gE9/GM+lNLFgx4i1Ys2aHhIBXgAVICNggHgNm6rqJN2BZGv36/v/MOWeea/Z5RVHurLfRUsfZXOnccx522itrd53O0vLqbaKYtsAKUe1pcege7hm9JNtzM8+kOOzNApIX0A3xBXE6YE7g0UWjg2OaZAJXbKvALOnj2GEHKc496ykLktgNt3Jz17hprCUxFqExe7YIpQkNpO1/kfHhPUdtUAdH2/gfmeYiIFW7IkM6IBP2wrDNbMe3Mjf2ksiK3Hjghg7F2DN9l/omZZl5Mmez2QRk0q4WUUB0+1oh9nDwxGdUXJdXPMRZQs352eGaRPV9s2lcMeZFGWBfKJJiw0YgbCMLBaRmXyy4flx6a667Fch55q05QOq2Jg2ANOyZwplhNsjiohVApo7aa21QnNGW5+4GXv8gxK1beBeHSRrhmLXWVh+0aBhErZ7bx1ejxMOhlR6QU4ycNqGyk8/yNGCWkwY7/RCD7UEQek4QszCgDJAzZtfErA0VqHBy9ugQP9pUfUmgCjVYgWNwHFbhBJyEOgSwBuuwARWZmoI6J9PwLfzEocpRpPrT8DP8wqHG0b4UX+E3DiscvRglXIoi81KKPwioHI5x9EooNKWiy0KOc/T6WF4SssrRuzJ9L2VNRXUhJzj6UKYfS4W/q/5wuh/l4M9R9qsU+y2dpoo2hJzkaEET8r6KRONicnRdK9EbUi6raFVIwNGjsrlbpk6ZPi7TbS3fv3LyNjPiEKzG0aG0t
vNb6xw90/whe6ONjnJcUxobHDUqQ8bIOW79BVBLBwhfSmPKdAIAAE4EAABQSwMEAAAICAAAAAAAAAAAAAAAAAAAAAAAABkABQBzaW1wbGVtb2RlbC9jb25zdGFudHMucGtsRkIBAFqAAikuUEsHCG0vCVcEAAAABAAAAFBLAwQAAAgIAAAAAAAAAAAAAAAAAAAAAAAAEwA7AHNpbXBsZW1vZGVsL3ZlcnNpb25GQjcAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWjMKUEsHCNGeZ1UCAAAAAgAAAFBLAQIAAAAACAgAAAAAAAAhOZuwWAAAAFgAAAAUAAAAAAAAAAAAAAAAAAAAAABzaW1wbGVtb2RlbC9kYXRhLnBrbFBLAQIAABQACAgIAAAAAABUhNyGggEAACADAAAdAAAAAAAAAAAAAAAAAKgAAABzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weVBLAQIAABQACAgIAAAAAABfSmPKdAIAAE4EAAAnAAAAAAAAAAAAAAAAAJICAABzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weS5kZWJ1Z19wa2xQSwECAAAAAAgIAAAAAAAAbS8JVwQAAAAEAAAAGQAAAAAAAAAAAAAAAACEBQAAc2ltcGxlbW9kZWwvY29uc3RhbnRzLnBrbFBLAQIAAAAACAgAAAAAAADRnmdVAgAAAAIAAAATAAAAAAAAAAAAAAAAANQFAABzaW1wbGVtb2RlbC92ZXJzaW9uUEsGBiwAAAAAAAAAHgMtAAAAAAAAAAAABQAAAAAAAAAFAAAAAAAAAGoBAAAAAAAAUgYAAAAAAABQSwYHAAAAALwHAAAAAAAAAQAAAFBLBQYAAAAABQAFAGoBAABSBgAAAAA=", + "total_parts": 1 + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json + bulk: + index: index-with-rank-features + refresh: true + body: | + {"index": {}} + {"source_text": "my words comforter", "ml.tokens":{"my":1.0, "words":1.0,"comforter":1.0}} + {"index": {}} + {"source_text": "the machine is leaking", "ml.tokens":{"the":1.0,"machine":1.0,"is":1.0,"leaking":1.0}} + {"index": {}} + {"source_text": "these are my words", "ml.tokens":{"these":1.0,"are":1.0,"my":1.0,"words":1.0}} + {"index": {}} + {"source_text": "the octopus comforter smells", "ml.tokens":{"the":1.0,"octopus":1.0,"comforter":1.0,"smells":1.0}} + {"index": {}} + {"source_text": "the octopus comforter is leaking", "ml.tokens":{"the":1.0,"octopus":1.0,"comforter":1.0,"is":1.0,"leaking":1.0}} + {"index": {}} + {"source_text": "washing machine smells", "ml.tokens":{"washing":1.0,"machine":1.0,"smells":1.0}} + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json + ml.start_trained_model_deployment: + model_id: text_expansion_model + wait_for: started + +--- +"Test text expansion search": + - do: + search: + index: index-with-rank-features + body: + query: + text_expansion: + ml.tokens: + model_id: text_expansion_model + model_text: "octopus comforter smells" + - match: { hits.total.value: 4 } + - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java index bb33353d7b714..74eea067f8811 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java +++ 
b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.settings.Settings; @@ -97,7 +96,7 @@ public void testUpdateDeletesOldTransformConfig() throws Exception { IndexRequest indexRequest = new IndexRequest(OLD_INDEX).id(TransformConfig.documentId(transformId)) .source(config, XContentType.JSON) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertThat(indexResponse.getResult(), is(DocWriteResponse.Result.CREATED)); GetTransformAction.Request getTransformRequest = new GetTransformAction.Request(transformId); diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformOldTransformsIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformOldTransformsIT.java index ca03f7eb9dd9c..346765c515d31 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformOldTransformsIT.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/integration/TransformOldTransformsIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import 
org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.ValidationException; @@ -110,7 +109,7 @@ public void testStopThrowsForDeprecatedTransformConfig() throws Exception { IndexRequest indexRequest = new IndexRequest(OLD_INDEX).id(TransformConfig.documentId(transformId)) .source(config, XContentType.JSON) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - IndexResponse indexResponse = client().index(indexRequest).actionGet(); + DocWriteResponse indexResponse = client().index(indexRequest).actionGet(); assertThat(indexResponse.getResult(), is(DocWriteResponse.Result.CREATED)); GetTransformAction.Request getTransformRequest = new GetTransformAction.Request(transformId); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndex.java index 1ecd300b243ba..3273ae810b77e 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndex.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.transform.persistence; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.search.SearchHit; import java.util.Objects; @@ -24,7 +24,7 @@ public static SeqNoPrimaryTermAndIndex fromSearchHit(SearchHit hit) { return new SeqNoPrimaryTermAndIndex(hit.getSeqNo(), hit.getPrimaryTerm(), hit.getIndex()); } - public static SeqNoPrimaryTermAndIndex fromIndexResponse(IndexResponse response) { + public static SeqNoPrimaryTermAndIndex fromIndexResponse(DocWriteResponse response) { return new SeqNoPrimaryTermAndIndex(response.getSeqNo(), response.getPrimaryTerm(), 
response.getIndex()); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java index af8ada52b8057..82f302b6fb44d 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.core.TimeValue; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; @@ -54,7 +53,7 @@ public class WatchAckTests extends AbstractWatcherIntegrationTestCase { @Before public void indexTestDocument() { - IndexResponse eventIndexResponse = client().prepareIndex() + DocWriteResponse eventIndexResponse = client().prepareIndex() .setIndex("events") .setId(id) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java index ec03215435fae..d268c6df4b21b 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.watcher.transport.action.activate; +import 
org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.set.Sets; @@ -143,7 +143,7 @@ public void testLoadWatchWithoutAState() throws Exception { source.toXContent(builder, ToXContent.EMPTY_PARAMS); // now that we filtered out the watch status state, lets put it back in - IndexResponse indexResponse = client().prepareIndex() + DocWriteResponse indexResponse = client().prepareIndex() .setIndex(".watches") .setId("_id") .setSource(BytesReference.bytes(builder), XContentType.JSON) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java index d919f79b0cb02..0a67129495cb5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.TimeValue; @@ -116,7 +115,7 @@ public Action.Result execute(String actionId, WatchExecutionContext ctx, Payload } ClientHelper.assertNoAuthorizationHeader(ctx.watch().status().getHeaders()); - IndexResponse response = ClientHelper.executeWithHeaders( + DocWriteResponse response = ClientHelper.executeWithHeaders( ctx.watch().status().getHeaders(), ClientHelper.WATCHER_ORIGIN, client, @@ -273,7 +272,7 @@ 
private static void itemResponseToXContent(XContentBuilder builder, BulkItemResp } } - static void indexResponseToXContent(XContentBuilder builder, IndexResponse response) throws IOException { + static void indexResponseToXContent(XContentBuilder builder, DocWriteResponse response) throws IOException { builder.startObject() .field("created", response.getResult() == DocWriteResponse.Result.CREATED) .field("result", response.getResult().getLowercase()) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchAction.java index 405fcac33ff9e..f19a1ecd0c0fc 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -153,7 +152,7 @@ protected void doExecute(PutWatchRequest request, ActionListenerwrap(response -> { + ActionListener.wrap(response -> { boolean created = response.getResult() == DocWriteResponse.Result.CREATED; listener.onResponse( new PutWatchResponse( diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java index 57582b9a9793b..c355ec3ebb0ed 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java 
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; @@ -241,7 +242,7 @@ public void testThatIndexTypeIdDynamically() throws Exception { final WatchExecutionContext ctx = WatcherTestUtils.mockExecutionContext("_id", new Payload.Simple(Maps.ofEntries(entries))); ArgumentCaptor captor = ArgumentCaptor.forClass(IndexRequest.class); - PlainActionFuture listener = PlainActionFuture.newFuture(); + PlainActionFuture listener = PlainActionFuture.newFuture(); listener.onResponse(new IndexResponse(new ShardId(new Index("foo", "bar"), 0), "whatever", 1, 1, 1, true)); when(client.index(captor.capture())).thenReturn(listener); Action.Result result = executable.execute("_id", ctx, ctx.payload()); @@ -339,7 +340,7 @@ public void testIndexActionExecuteSingleDoc() throws Exception { WatchExecutionContext ctx = WatcherTestUtils.mockExecutionContext("_id", executionTime, payload); ArgumentCaptor captor = ArgumentCaptor.forClass(IndexRequest.class); - PlainActionFuture listener = PlainActionFuture.newFuture(); + PlainActionFuture listener = PlainActionFuture.newFuture(); listener.onResponse(new IndexResponse(new ShardId(new Index("test-index", "uuid"), 0), docId, 1, 1, 1, true)); when(client.index(captor.capture())).thenReturn(listener); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java index fa36d2aac7f14..8111e9e68df8a 100644 --- 
a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; import org.junit.BeforeClass; @@ -123,8 +124,11 @@ private void testSnapshotUpgrade() throws Exception { Response getSnapshotsResponse = getModelSnapshots(JOB_ID); List> snapshots = (List>) entityAsMap(getSnapshotsResponse).get("model_snapshots"); assertThat(snapshots, hasSize(2)); - assertThat(Integer.parseInt(snapshots.get(0).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); - assertThat(Integer.parseInt(snapshots.get(1).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); + MlConfigVersion snapshotConfigVersion = MlConfigVersion.fromString(snapshots.get(0).get("min_version").toString()); + assertTrue( + "Expected " + snapshotConfigVersion + " not greater than " + MlConfigVersion.CURRENT, + snapshotConfigVersion.onOrBefore(MlConfigVersion.CURRENT) + ); Map snapshotToUpgrade = snapshots.stream() .filter(s -> s.get("snapshot_id").equals(currentSnapshotId) == false) @@ -232,8 +236,11 @@ private void createJobAndSnapshots() throws Exception { var modelSnapshots = entityAsMap(getModelSnapshots(JOB_ID)); var snapshots = (List>) modelSnapshots.get("model_snapshots"); assertThat(snapshots, hasSize(2)); - assertThat(Integer.parseInt(snapshots.get(0).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); - assertThat(Integer.parseInt(snapshots.get(1).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); + MlConfigVersion snapshotConfigVersion = 
MlConfigVersion.fromString(snapshots.get(0).get("min_version").toString()); + assertTrue( + "Expected " + snapshotConfigVersion + " not greater than " + MlConfigVersion.CURRENT, + snapshotConfigVersion.onOrBefore(MlConfigVersion.CURRENT) + ); } private Response buildAndPutJob(String jobId, TimeValue bucketSpan) throws Exception { diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 72ccea479277f..0c0a35f227c8e 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.SecureString; @@ -257,7 +256,7 @@ protected void assertAccessAllowed(String user, String index) throws IOException // We can safely re-try this if it fails, which makes it less likely that the index request will fail authenticateUser(client, user, 3); - IndexResponse indexResponse = client.prepareIndex(index) + DocWriteResponse indexResponse = client.prepareIndex(index) .setSource(jsonBuilder().startObject().field("name", "value").endObject()) .execute() .actionGet();