From 404aa77849f76584f31d0b5be0d77b109e8e5253 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 11 Sep 2023 15:13:50 -0400 Subject: [PATCH 001/114] Fix a typo in the data_stream _stats API documentation (#99438) --- docs/reference/indices/data-stream-stats.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/indices/data-stream-stats.asciidoc b/docs/reference/indices/data-stream-stats.asciidoc index 3d27eacf830da..d35e2738d0321 100644 --- a/docs/reference/indices/data-stream-stats.asciidoc +++ b/docs/reference/indices/data-stream-stats.asciidoc @@ -120,7 +120,7 @@ Total number of selected data streams. (integer) Total number of backing indices for the selected data streams. -`total_store_sizes`:: +`total_store_size`:: (<>) Total size of all shards for the selected data streams. This property is included only if the `human` query parameter is `true`. From a7617db23d118cf9977edebfefeab74a8c7127da Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 11 Sep 2023 15:19:43 -0400 Subject: [PATCH 002/114] Drop changelog (#99436) We don't want it. 
--- docs/changelog/99434.yaml | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 docs/changelog/99434.yaml diff --git a/docs/changelog/99434.yaml b/docs/changelog/99434.yaml deleted file mode 100644 index b03bc4f3c9b41..0000000000000 --- a/docs/changelog/99434.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 99434 -summary: "ESQL: Disable optimizations with bad null handling" -area: ES|QL -type: bug -issues: [] From 6f90fd7ecfc111af5753cf6483d578abd2a28a5a Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 11 Sep 2023 16:18:48 -0400 Subject: [PATCH 003/114] [buildkite] Add buildkite pr-bot config for triggering builds with opt-in label (#99446) --- .buildkite/pull-requests.json | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 .buildkite/pull-requests.json diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json new file mode 100644 index 0000000000000..c6682e55642c5 --- /dev/null +++ b/.buildkite/pull-requests.json @@ -0,0 +1,18 @@ +{ + "jobs": [ + { + "enabled": true, + "pipeline_slug": "elasticsearch-pull-request", + "allow_org_users": true, + "allowed_repo_permissions": [ + "admin", + "write" + ], + "set_commit_status": false, + "build_on_commit": true, + "build_on_comment": true, + "trigger_comment_regex": "buildkite\\W+elasticsearch-ci.+", + "labels": "buildkite-opt-in" + } + ] +} From 590f27a5cbc8c75ba321fdacab625e30774aeba3 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 11 Sep 2023 16:25:43 -0400 Subject: [PATCH 004/114] Fix pull-requests.json --- .buildkite/pull-requests.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json index c6682e55642c5..466b69e008241 100644 --- a/.buildkite/pull-requests.json +++ b/.buildkite/pull-requests.json @@ -12,7 +12,7 @@ "build_on_commit": true, "build_on_comment": true, "trigger_comment_regex": "buildkite\\W+elasticsearch-ci.+", - "labels": "buildkite-opt-in" + "labels": 
["buildkite-opt-in"] } ] } From 6602b6c726daee6ccf48a86e2208c1a1c9bc7a24 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Tue, 12 Sep 2023 00:22:11 +0300 Subject: [PATCH 005/114] ESQL: create a Vector when needed for IN (#99382) --- docs/changelog/99382.yaml | 6 ++++++ .../qa/testFixtures/src/main/resources/string.csv-spec | 7 +++++++ .../evaluator/predicate/operator/comparison/InMapper.java | 7 ++++++- 3 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/99382.yaml diff --git a/docs/changelog/99382.yaml b/docs/changelog/99382.yaml new file mode 100644 index 0000000000000..5f5eb932ed458 --- /dev/null +++ b/docs/changelog/99382.yaml @@ -0,0 +1,6 @@ +pr: 99382 +summary: "ESQL: create a Vector when needed for IN" +area: ES|QL +type: bug +issues: + - 99347 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index aa893e63e1a30..357f6369dca73 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -327,6 +327,13 @@ emp_no:integer |job_positions:keyword |is_in:boolean 10026 |Reporting Analyst |null ; +in3VLWithNull-99347_bugfix +from employees | where emp_no == 10025 | keep emp_no, job_positions | eval is_in = job_positions in ("Accountant", "Internship", null); + +emp_no:integer |job_positions:keyword |is_in:boolean +10025 |Accountant |true +; + in3VLWithComputedNull from employees | sort emp_no | where mv_count(job_positions) <= 1 | where emp_no >= 10024 | limit 3 | keep emp_no, job_positions | eval nil = concat("", null) | eval is_in = job_positions in ("Accountant", "Internship", nil); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java 
index 7ed08b658c75e..b99cccab54ba3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java @@ -106,7 +106,12 @@ private static Block evalWithNulls(boolean[] values, BitSet nulls, boolean nullI nulls.set(i); } // else: leave nulls as is } - return new BooleanArrayBlock(values, values.length, null, nulls, Block.MvOrdering.UNORDERED); + if (nulls.isEmpty()) { + // no nulls and no multi-values means we must use a Vector + return new BooleanArrayVector(values, values.length).asBlock(); + } else { + return new BooleanArrayBlock(values, values.length, null, nulls, Block.MvOrdering.UNORDERED); + } } } } From f241f2bb6ae6944799fe6cf36d5674a0ec46667f Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Tue, 12 Sep 2023 01:22:27 +0300 Subject: [PATCH 006/114] ESQL: Make the stats test more deterministic (for multi-node testing) (#99451) --- .../src/main/resources/stats.csv-spec | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index c1f623fda251d..a8918251b5ed2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -360,21 +360,21 @@ byUnmentionedLongAndLong FROM employees | EVAL trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000 | STATS c = count(gender) by languages.long, trunk_worked_seconds -| SORT c desc, trunk_worked_seconds; +| SORT c desc, trunk_worked_seconds, languages.long; c:long | languages.long:long | trunk_worked_seconds:long -13 |5 |300000000 -10 |2 |300000000 - 9 |3 |200000000 - 9 |4 |300000000 - 8 |4 |200000000 - 8 |3 |300000000 - 7 |1 |200000000 - 6 |2 |200000000 - 6 |null 
|300000000 - 6 |1 |300000000 - 4 |null |200000000 - 4 |5 |200000000 +13 |5 |300000000 +10 |2 |300000000 +9 |3 |200000000 +9 |4 |300000000 +8 |4 |200000000 +8 |3 |300000000 +7 |1 |200000000 +6 |2 |200000000 +6 |1 |300000000 +6 |null |300000000 +4 |5 |200000000 +4 |null |200000000 ; byUnmentionedIntAndLong From 4ee229779b8a448953a97402401be81f0455d3d8 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 12 Sep 2023 07:16:39 +0100 Subject: [PATCH 007/114] Clean up delete code in S3BlobContainer (#99447) Simplifies things using utils from `Iterators` that didn't exist when the code was first written. --- .../repositories/s3/S3BlobContainer.java | 55 +++---------------- .../repositories/s3/S3BlobStore.java | 4 ++ 2 files changed, 13 insertions(+), 46 deletions(-) diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 0057f36d94cb8..86650bc0fe9c2 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -23,7 +23,6 @@ import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.PartETag; import com.amazonaws.services.s3.model.PutObjectRequest; -import com.amazonaws.services.s3.model.S3ObjectSummary; import com.amazonaws.services.s3.model.UploadPartRequest; import com.amazonaws.services.s3.model.UploadPartResult; @@ -60,7 +59,6 @@ import java.io.OutputStream; import java.time.Instant; import java.util.ArrayList; -import java.util.Collections; import java.util.Date; import java.util.Iterator; import java.util.List; @@ -272,14 +270,13 @@ public void writeBlobAtomic(String blobName, BytesReference bytes, boolean failI } @Override - @SuppressWarnings("unchecked") public DeleteResult delete() throws IOException { final AtomicLong deletedBlobs = new 
AtomicLong(); final AtomicLong deletedBytes = new AtomicLong(); try (AmazonS3Reference clientReference = blobStore.clientReference()) { ObjectListing prevListing = null; while (true) { - ObjectListing list; + final ObjectListing list; if (prevListing != null) { final var listNextBatchOfObjectsRequest = new ListNextBatchOfObjectsRequest(prevListing); listNextBatchOfObjectsRequest.setRequestMetricCollector(blobStore.listMetricCollector); @@ -291,26 +288,16 @@ public DeleteResult delete() throws IOException { listObjectsRequest.setRequestMetricCollector(blobStore.listMetricCollector); list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); } - final Iterator objectSummaryIterator = list.getObjectSummaries().iterator(); - final Iterator blobNameIterator = new Iterator<>() { - @Override - public boolean hasNext() { - return objectSummaryIterator.hasNext(); - } - - @Override - public String next() { - final S3ObjectSummary summary = objectSummaryIterator.next(); - deletedBlobs.incrementAndGet(); - deletedBytes.addAndGet(summary.getSize()); - return summary.getKey(); - } - }; + final Iterator blobNameIterator = Iterators.map(list.getObjectSummaries().iterator(), summary -> { + deletedBlobs.incrementAndGet(); + deletedBytes.addAndGet(summary.getSize()); + return summary.getKey(); + }); if (list.isTruncated()) { - doDeleteBlobs(blobNameIterator, false); + blobStore.deleteBlobsIgnoringIfNotExists(blobNameIterator); prevListing = list; } else { - doDeleteBlobs(Iterators.concat(blobNameIterator, Collections.singletonList(keyPath).iterator()), false); + blobStore.deleteBlobsIgnoringIfNotExists(Iterators.concat(blobNameIterator, Iterators.single(keyPath))); break; } } @@ -322,31 +309,7 @@ public String next() { @Override public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { - doDeleteBlobs(blobNames, true); - } - - private void doDeleteBlobs(Iterator blobNames, boolean relative) throws IOException { - if 
(blobNames.hasNext() == false) { - return; - } - final Iterator outstanding; - if (relative) { - outstanding = new Iterator<>() { - @Override - public boolean hasNext() { - return blobNames.hasNext(); - } - - @Override - public String next() { - return buildKey(blobNames.next()); - } - }; - } else { - outstanding = blobNames; - } - - blobStore.deleteBlobsIgnoringIfNotExists(outstanding); + blobStore.deleteBlobsIgnoringIfNotExists(Iterators.map(blobNames, this::buildKey)); } @Override diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index f25ee58772859..027fd03d83c55 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -215,6 +215,10 @@ public BlobContainer blobContainer(BlobPath path) { @Override public void deleteBlobsIgnoringIfNotExists(Iterator blobNames) throws IOException { + if (blobNames.hasNext() == false) { + return; + } + final List partition = new ArrayList<>(); try (AmazonS3Reference clientReference = clientReference()) { // S3 API only allows 1k blobs per delete so we split up the given blobs into requests of max. 1k deletes From 4c8888de9e8e0da25369a81457d33e255cf8c993 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 12 Sep 2023 18:08:05 +1000 Subject: [PATCH 008/114] Do not report failure after connections are made (#99117) Today, when the number of attempts is exhausted, ProxyConnectionStrategy checks the number of connections before returns. It reports connection failure if the number of connections is zero at the time of checking. However, this behaviour is incorrect. In rare cases, a connection can be dropped right after it is initially established and before the number checking. 
From the perspective of the `openConnections` method, it should not care whether or when opened connections are subsequently closed. As long as connections have been initially established, it should report success instead of failure. This PR adjusts the code to report success in above situation. Relates: #94998 Resolves: #99113 --- docs/changelog/99117.yaml | 5 ++ .../transport/ProxyConnectionStrategy.java | 21 +++----- .../ProxyConnectionStrategyTests.java | 51 +++++++++++++++++++ 3 files changed, 63 insertions(+), 14 deletions(-) create mode 100644 docs/changelog/99117.yaml diff --git a/docs/changelog/99117.yaml b/docs/changelog/99117.yaml new file mode 100644 index 0000000000000..491692f232081 --- /dev/null +++ b/docs/changelog/99117.yaml @@ -0,0 +1,5 @@ +pr: 99117 +summary: Do not report failure after connections are made +area: Network +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java index 83a0860ba6324..35655f6260461 100644 --- a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java @@ -32,7 +32,6 @@ import java.util.Collections; import java.util.Map; import java.util.Objects; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -315,19 +314,13 @@ public void onFailure(Exception e) { })); } } else { - int openConnections = connectionManager.size(); - if (openConnections == 0) { - assert false : "should not happen since onFailure should catch it and report with underlying cause"; - finished.onFailure(getNoSeedNodeLeftException(Set.of())); - } else { - logger.debug( - "unable to open maximum number of connections [remote cluster: {}, opened: {}, maximum: {}]", - clusterAlias, - 
openConnections, - maxNumConnections - ); - finished.onResponse(null); - } + logger.debug( + "unable to open maximum number of connections [remote cluster: {}, opened: {}, maximum: {}]", + clusterAlias, + connectionManager.size(), + maxNumConnections + ); + finished.onResponse(null); } } diff --git a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java index d2941bab3f91a..965288a989870 100644 --- a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java @@ -43,8 +43,10 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItemInArray; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.spy; @@ -480,6 +482,55 @@ public void testProxyStrategyWillResolveAddressesEachConnect() throws Exception } } + public void testConnectionsClosedAfterInitiallyEstablishedDoesNotLeadToFailure() throws InterruptedException { + try (MockTransportService remoteService = startTransport("proxy_node", VersionInformation.CURRENT, TransportVersion.current())) { + TransportAddress address = remoteService.boundAddress().publishAddress(); + + try ( + MockTransportService localService = MockTransportService.createNewService( + Settings.EMPTY, + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool + ) + ) { + localService.start(); + + final var connectionManager = new ClusterConnectionManager(profile, localService.transport, threadPool.getThreadContext()); + final int numOfConnections = randomIntBetween(4, 8); + final var connectionCountDown = new 
CountDownLatch(numOfConnections); + connectionManager.addListener(new TransportConnectionListener() { + @Override + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { + // Count down to ensure at least the required number of connection are indeed initially established + connectionCountDown.countDown(); + // Simulate disconnection right after connection is made + connection.close(); + } + }); + + try ( + var remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + var strategy = new ProxyConnectionStrategy( + clusterAlias, + localService, + remoteConnectionManager, + Settings.EMPTY, + numOfConnections, + address.toString() + ) + ) { + final PlainActionFuture connectFuture = PlainActionFuture.newFuture(); + strategy.connect(connectFuture); + // Should see no error and the connection size is 0 + connectFuture.actionGet(); + assertThat(connectionCountDown.await(30L, TimeUnit.SECONDS), is(true)); + assertThat(remoteConnectionManager.size(), equalTo(0)); + } + } + } + } + public void testProxyStrategyWillNeedToBeRebuiltIfNumOfSocketsOrAddressesOrServerNameChange() { try (MockTransportService remoteTransport = startTransport("node1", VersionInformation.CURRENT, TransportVersion.current())) { TransportAddress remoteAddress = remoteTransport.boundAddress().publishAddress(); From ae2aacb66affa098f1e431a296a0dc4898ccd640 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Tue, 12 Sep 2023 11:54:54 +0300 Subject: [PATCH 009/114] Disable FilterByFilterAggregator through ClusterSettings (#99417) `search.aggs.rewrite_to_filter_by_filter` allows disabling FilterByFilterAggregator when used in terms and range aggregation. The same should apply to filter aggregation. 
Fixes #99335 --- docs/changelog/99417.yaml | 6 ++++++ .../bucket/filter/FilterByFilterAggregator.java | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/99417.yaml diff --git a/docs/changelog/99417.yaml b/docs/changelog/99417.yaml new file mode 100644 index 0000000000000..8c88a5a548dff --- /dev/null +++ b/docs/changelog/99417.yaml @@ -0,0 +1,6 @@ +pr: 99417 +summary: Disable `FilterByFilterAggregator` through `ClusterSettings` +area: Aggregations +type: enhancement +issues: + - 99335 diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java index a2ce68e3fc29e..3a2cce587f34f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java @@ -134,7 +134,7 @@ final void add(QueryToFilterAdapter filter) throws IOException { * Build the the adapter or {@code null} if the this isn't a valid rewrite. */ public final T build() throws IOException { - if (false == valid) { + if (false == valid || aggCtx.enableRewriteToFilterByFilter() == false) { return null; } class AdapterBuild implements CheckedFunction { From 16a4e542f09acef5e69daf41f5431bf2c4096dc2 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 12 Sep 2023 19:24:45 +1000 Subject: [PATCH 010/114] [Test] More robust order of assertions (#99461) This PR adjusts the order of assertions to ensure we are done with the atomic reference variable in the previous assertions before changing it to null. 
Resolves: #99406 --- ...tty4ServerTransportAuthenticationTests.java | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java index 2a15aa09ddccd..99a411ab11a90 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportAuthenticationTests.java @@ -204,9 +204,12 @@ public void testProxyStrategyConnectionClosesWhenAuthenticatorAlwaysFails() thro fail("No connection should be available if authn fails"); }, e -> { logger.info("Expected: no connection could not be established"); - connectionTestDone.countDown(); - assertThat(e, instanceOf(RemoteTransportException.class)); - assertThat(e.getCause(), instanceOf(authenticationException.get().getClass())); + try { + assertThat(e, instanceOf(RemoteTransportException.class)); + assertThat(e.getCause(), instanceOf(authenticationException.get().getClass())); + } finally { + connectionTestDone.countDown(); + } })); assertTrue(connectionTestDone.await(10L, TimeUnit.SECONDS)); } @@ -261,9 +264,12 @@ public void testSniffStrategyNoConnectionWhenAuthenticatorAlwaysFails() throws E fail("No connection should be available if authn fails"); }, e -> { logger.info("Expected: no connection could be established"); - connectionTestDone.countDown(); - assertThat(e, instanceOf(RemoteTransportException.class)); - assertThat(e.getCause(), instanceOf(authenticationException.get().getClass())); + try { + assertThat(e, instanceOf(RemoteTransportException.class)); + assertThat(e.getCause(), 
instanceOf(authenticationException.get().getClass())); + } finally { + connectionTestDone.countDown(); + } })); assertTrue(connectionTestDone.await(10L, TimeUnit.SECONDS)); } From 22a2cd0a0205745e050b973d28c58186f5f6be58 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Tue, 12 Sep 2023 14:04:58 +0300 Subject: [PATCH 011/114] More deterministic tests (#99469) --- .../qa/testFixtures/src/main/resources/stats.csv-spec | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index a8918251b5ed2..da559485d17ff 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -293,15 +293,15 @@ byStringAndLong FROM employees | EVAL trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000 | STATS c = COUNT(gender) by gender, trunk_worked_seconds -| SORT c desc; +| SORT c desc, gender, trunk_worked_seconds desc; c:long | gender:keyword | trunk_worked_seconds:long 30 | M | 300000000 27 | M | 200000000 22 | F | 300000000 11 | F | 200000000 - 0 | null | 200000000 0 | null | 300000000 + 0 | null | 200000000 ; byStringAndLongWithAlias @@ -310,15 +310,15 @@ FROM employees | RENAME gender as g, trunk_worked_seconds as tws | KEEP g, tws | STATS c = count(g) by g, tws -| SORT c desc; +| SORT c desc, g, tws desc; c:long | g:keyword | tws:long 30 | M | 300000000 27 | M | 200000000 22 | F | 300000000 11 | F | 200000000 - 0 | null | 200000000 0 | null | 300000000 + 0 | null | 200000000 ; byStringAndString From 30f6e51804905c12d2b302ed9334bf5b5c7feae6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Krze=C5=9Bniak?= Date: Tue, 12 Sep 2023 13:13:38 +0200 Subject: [PATCH 012/114] Update ES|QL (#99467) To make it more clear let's use different index names for comma-separated index list --- 
docs/reference/esql/source-commands/from.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/esql/source-commands/from.asciidoc b/docs/reference/esql/source-commands/from.asciidoc index 69ab152de9cd8..2a83f3abc8a4e 100644 --- a/docs/reference/esql/source-commands/from.asciidoc +++ b/docs/reference/esql/source-commands/from.asciidoc @@ -25,7 +25,7 @@ or aliases: [source,esql] ---- -FROM employees-00001,employees-* +FROM employees-00001,other-employees-* ---- Use the `METADATA` directive to enable <>: From 54f6e4f51bbbf851c3f7e6fabed977d26ddebd52 Mon Sep 17 00:00:00 2001 From: Abdon Pijpelink Date: Tue, 12 Sep 2023 13:25:56 +0200 Subject: [PATCH 013/114] [DOCS] Remove 'coming in 8.10' from remote cluster API key auth docs (#99462) --- docs/reference/modules/cluster/remote-clusters-api-key.asciidoc | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc b/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc index 29fe2b0aaf35e..9451c8ba50aae 100644 --- a/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc @@ -1,7 +1,6 @@ [[remote-clusters-api-key]] === Add remote clusters using API key authentication -coming::[8.10] beta::[] API key authentication enables a local cluster to authenticate itself with a From 403bcb366a218e749916f9a11013ac0b0454e3d6 Mon Sep 17 00:00:00 2001 From: Milton Hultgren Date: Tue, 12 Sep 2023 14:11:30 +0200 Subject: [PATCH 014/114] Update CODEOWNER paths for Stack Monitoring mappings (#99428) --- .github/CODEOWNERS | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index c29ed92f39547..cbe2d8ba4a82d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,17 +3,17 @@ # For more info, see https://help.github.com/articles/about-codeowners/ # Stack Monitoring index templates 
-x-pack/plugin/core/src/main/resources/monitoring-alerts-7.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-beats-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-beats.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-ent-search-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-es-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-es.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-kibana.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-logstash-mb.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-logstash.json @elastic/infra-monitoring-ui -x-pack/plugin/core/src/main/resources/monitoring-mb-ilm-policy.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-alerts-7.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-ent-search-mb.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-es.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana-mb.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash-mb.json 
@elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash.json @elastic/infra-monitoring-ui +x-pack/plugin/core/template-resources/src/main/resources/monitoring-mb-ilm-policy.json @elastic/infra-monitoring-ui x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @elastic/infra-monitoring-ui # Fleet From e26dca469d27d94eed8812c52d43e2961e4eba2d Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 12 Sep 2023 08:12:20 -0400 Subject: [PATCH 015/114] [ESQL] Plumb through ranges and warnings for the casting to double tests (#99452) Add the warnings and range checking parameters to unary and binary casting to double test generators. I also moved the data type to the value supplier, which the binary case needed. That feels more right - that's what I was intending with TypedData to begin with, but our abstractions are still messy here. --- .../expression/function/TestCaseSupplier.java | 325 ++++++++++-------- .../scalar/convert/ToVersionTests.java | 7 +- .../function/scalar/math/AcosTests.java | 3 +- .../function/scalar/math/AsinTests.java | 3 +- .../function/scalar/math/Atan2Tests.java | 12 +- .../function/scalar/math/AtanTests.java | 3 +- .../function/scalar/math/CosTests.java | 3 +- .../function/scalar/math/CoshTests.java | 3 +- .../function/scalar/math/SinTests.java | 3 +- .../function/scalar/math/SinhTests.java | 3 +- .../function/scalar/math/TanTests.java | 3 +- .../function/scalar/math/TanhTests.java | 3 +- 12 files changed, 208 insertions(+), 163 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 3b6174bac5bf4..8b113bb12d605 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -26,7 +26,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Map; import java.util.function.DoubleBinaryOperator; import java.util.function.DoubleFunction; import java.util.function.DoubleUnaryOperator; @@ -94,7 +93,8 @@ public static List forUnaryCastingToDouble( String argName, DoubleUnaryOperator expected, Double min, - Double max + Double max, + List warnings ) { String read = "Attribute[channel=0]"; String eval = name + "[" + argName + "="; @@ -106,7 +106,7 @@ public static List forUnaryCastingToDouble( i -> expected.applyAsDouble(i), min.intValue(), max.intValue(), - List.of() + warnings ); forUnaryLong( suppliers, @@ -115,7 +115,7 @@ public static List forUnaryCastingToDouble( l -> expected.applyAsDouble(l), min.longValue(), max.longValue(), - List.of() + warnings ); forUnaryUnsignedLong( suppliers, @@ -124,9 +124,9 @@ public static List forUnaryCastingToDouble( ul -> expected.applyAsDouble(ul.doubleValue()), BigInteger.valueOf((int) Math.ceil(min)), BigInteger.valueOf((int) Math.floor(max)), - List.of() + warnings ); - forUnaryDouble(suppliers, eval + read + "]", DataTypes.DOUBLE, i -> expected.applyAsDouble(i), min, max, List.of()); + forUnaryDouble(suppliers, eval + read + "]", DataTypes.DOUBLE, i -> expected.applyAsDouble(i), min, max, warnings); return suppliers; } @@ -138,47 +138,60 @@ public static List forBinaryCastingToDouble( String name, String lhsName, String rhsName, - DoubleBinaryOperator expected + DoubleBinaryOperator expected, + Double lhsMin, + Double lhsMax, + Double rhsMin, + Double rhsMax, + List warnings ) { List suppliers = new ArrayList<>(); - for (DataType lhsType : EsqlDataTypes.types()) { - if (lhsType.isNumeric() == false || EsqlDataTypes.isRepresentable(lhsType) == false) { - continue; - } - for (Map.Entry> lhsSupplier : RANDOM_VALUE_SUPPLIERS.get(lhsType)) { - for (DataType 
rhsType : EsqlDataTypes.types()) { - if (rhsType.isNumeric() == false || EsqlDataTypes.isRepresentable(rhsType) == false) { - continue; + List lhsSuppliers = new ArrayList<>(); + List rhsSuppliers = new ArrayList<>(); + + lhsSuppliers.addAll(intCases(lhsMin.intValue(), lhsMax.intValue())); + lhsSuppliers.addAll(longCases(lhsMin.longValue(), lhsMax.longValue())); + lhsSuppliers.addAll(ulongCases(BigInteger.valueOf((long) Math.ceil(lhsMin)), BigInteger.valueOf((long) Math.floor(lhsMax)))); + lhsSuppliers.addAll(doubleCases(lhsMin, lhsMax)); + + rhsSuppliers.addAll(intCases(rhsMin.intValue(), rhsMax.intValue())); + rhsSuppliers.addAll(longCases(rhsMin.longValue(), rhsMax.longValue())); + rhsSuppliers.addAll(ulongCases(BigInteger.valueOf((long) Math.ceil(rhsMin)), BigInteger.valueOf((long) Math.floor(rhsMax)))); + rhsSuppliers.addAll(doubleCases(rhsMin, rhsMax)); + + for (TypedDataSupplier lhsSupplier : lhsSuppliers) { + for (TypedDataSupplier rhsSupplier : rhsSuppliers) { + String caseName = lhsSupplier.name() + ", " + rhsSupplier.name(); + suppliers.add(new TestCaseSupplier(caseName, List.of(lhsSupplier.type(), rhsSupplier.type()), () -> { + Number lhs = (Number) lhsSupplier.supplier().get(); + Number rhs = (Number) rhsSupplier.supplier().get(); + TypedData lhsTyped = new TypedData( + // TODO there has to be a better way to handle unsigned long + lhs instanceof BigInteger b ? NumericUtils.asLongUnsigned(b) : lhs, + lhsSupplier.type(), + "lhs" + ); + TypedData rhsTyped = new TypedData( + rhs instanceof BigInteger b ? 
NumericUtils.asLongUnsigned(b) : rhs, + rhsSupplier.type(), + "rhs" + ); + String lhsEvalName = castToDoubleEvaluator("Attribute[channel=0]", lhsSupplier.type()); + String rhsEvalName = castToDoubleEvaluator("Attribute[channel=1]", rhsSupplier.type()); + TestCase testCase = new TestCase( + List.of(lhsTyped, rhsTyped), + name + "[" + lhsName + "=" + lhsEvalName + ", " + rhsName + "=" + rhsEvalName + "]", + DataTypes.DOUBLE, + equalTo(expected.applyAsDouble(lhs.doubleValue(), rhs.doubleValue())) + ); + for (String warning : warnings) { + testCase = testCase.withWarning(warning); } - for (Map.Entry> rhsSupplier : RANDOM_VALUE_SUPPLIERS.get(rhsType)) { - String caseName = lhsSupplier.getKey() + ", " + rhsSupplier.getKey(); - suppliers.add(new TestCaseSupplier(caseName, List.of(lhsType, rhsType), () -> { - Number lhs = (Number) lhsSupplier.getValue().get(); - Number rhs = (Number) rhsSupplier.getValue().get(); - TypedData lhsTyped = new TypedData( - // TODO there has to be a better way to handle unsigned long - lhs instanceof BigInteger b ? NumericUtils.asLongUnsigned(b) : lhs, - lhsType, - "lhs" - ); - TypedData rhsTyped = new TypedData( - rhs instanceof BigInteger b ? 
NumericUtils.asLongUnsigned(b) : rhs, - rhsType, - "rhs" - ); - String lhsEvalName = castToDoubleEvaluator("Attribute[channel=0]", lhsType); - String rhsEvalName = castToDoubleEvaluator("Attribute[channel=1]", rhsType); - return new TestCase( - List.of(lhsTyped, rhsTyped), - name + "[" + lhsName + "=" + lhsEvalName + ", " + rhsName + "=" + rhsEvalName + "]", - DataTypes.DOUBLE, - equalTo(expected.applyAsDouble(lhs.doubleValue(), rhs.doubleValue())) - ); - })); - } - } + return testCase; + })); } } + return suppliers; } @@ -352,7 +365,7 @@ public static void forUnaryStrings( suppliers, expectedEvaluatorToString, type, - stringCases(type.typeName()), + stringCases(type), expectedType, v -> expectedValue.apply((BytesRef) v), warnings @@ -385,7 +398,7 @@ private static void unaryNumeric( List suppliers, String expectedEvaluatorToString, DataType inputType, - List>> valueSuppliers, + List valueSuppliers, DataType expectedOutputType, Function expected, List warnings @@ -405,18 +418,18 @@ private static void unary( List suppliers, String expectedEvaluatorToString, DataType inputType, - List>> valueSuppliers, + List valueSuppliers, DataType expectedOutputType, Function expected, List warnings ) { - for (Map.Entry> supplier : valueSuppliers) { - suppliers.add(new TestCaseSupplier(supplier.getKey(), List.of(inputType), () -> { - Object value = supplier.getValue().get(); + for (TypedDataSupplier supplier : valueSuppliers) { + suppliers.add(new TestCaseSupplier(supplier.name(), List.of(supplier.type()), () -> { + Object value = supplier.supplier().get(); TypedData typed = new TypedData( // TODO there has to be a better way to handle unsigned long value instanceof BigInteger b ? 
NumericUtils.asLongUnsigned(b) : value, - inputType, + supplier.type(), "value" ); TestCase testCase = new TestCase( @@ -433,61 +446,61 @@ private static void unary( } } - private static List>> intCases(int min, int max) { - List>> cases = new ArrayList<>(); + private static List intCases(int min, int max) { + List cases = new ArrayList<>(); if (0 <= max && 0 >= min) { - cases.add(Map.entry("<0 int>", () -> 0)); + cases.add(new TypedDataSupplier("<0 int>", () -> 0, DataTypes.INTEGER)); } int lower = Math.max(min, 1); int upper = Math.min(max, Integer.MAX_VALUE); if (lower < upper) { - cases.add(Map.entry("", () -> ESTestCase.randomIntBetween(lower, upper))); + cases.add(new TypedDataSupplier("", () -> ESTestCase.randomIntBetween(lower, upper), DataTypes.INTEGER)); } else if (lower == upper) { - cases.add(Map.entry("<" + lower + " int>", () -> lower)); + cases.add(new TypedDataSupplier("<" + lower + " int>", () -> lower, DataTypes.INTEGER)); } int lower1 = Math.max(min, Integer.MIN_VALUE); int upper1 = Math.min(max, -1); if (lower1 < upper1) { - cases.add(Map.entry("", () -> ESTestCase.randomIntBetween(lower1, upper1))); + cases.add(new TypedDataSupplier("", () -> ESTestCase.randomIntBetween(lower1, upper1), DataTypes.INTEGER)); } else if (lower1 == upper1) { - cases.add(Map.entry("<" + lower1 + " int>", () -> lower1)); + cases.add(new TypedDataSupplier("<" + lower1 + " int>", () -> lower1, DataTypes.INTEGER)); } return cases; } - private static List>> longCases(long min, long max) { - List>> cases = new ArrayList<>(); + private static List longCases(long min, long max) { + List cases = new ArrayList<>(); if (0L <= max && 0L >= min) { - cases.add(Map.entry("<0 long>", () -> 0L)); + cases.add(new TypedDataSupplier("<0 long>", () -> 0L, DataTypes.LONG)); } long lower = Math.max(min, 1); long upper = Math.min(max, Long.MAX_VALUE); if (lower < upper) { - cases.add(Map.entry("", () -> ESTestCase.randomLongBetween(lower, upper))); + cases.add(new TypedDataSupplier("", () 
-> ESTestCase.randomLongBetween(lower, upper), DataTypes.LONG)); } else if (lower == upper) { - cases.add(Map.entry("<" + lower + " long>", () -> lower)); + cases.add(new TypedDataSupplier("<" + lower + " long>", () -> lower, DataTypes.LONG)); } long lower1 = Math.max(min, Long.MIN_VALUE); long upper1 = Math.min(max, -1); if (lower1 < upper1) { - cases.add(Map.entry("", () -> ESTestCase.randomLongBetween(lower1, upper1))); + cases.add(new TypedDataSupplier("", () -> ESTestCase.randomLongBetween(lower1, upper1), DataTypes.LONG)); } else if (lower1 == upper1) { - cases.add(Map.entry("<" + lower1 + " long>", () -> lower1)); + cases.add(new TypedDataSupplier("<" + lower1 + " long>", () -> lower1, DataTypes.LONG)); } return cases; } - private static List>> ulongCases(BigInteger min, BigInteger max) { - List>> cases = new ArrayList<>(); + private static List ulongCases(BigInteger min, BigInteger max) { + List cases = new ArrayList<>(); // Zero if (BigInteger.ZERO.compareTo(max) <= 0 && BigInteger.ZERO.compareTo(min) >= 0) { - cases.add(Map.entry("<0 unsigned long>", () -> BigInteger.ZERO)); + cases.add(new TypedDataSupplier("<0 unsigned long>", () -> BigInteger.ZERO, DataTypes.UNSIGNED_LONG)); } // small values, less than Long.MAX_VALUE @@ -495,13 +508,14 @@ private static List>> ulongCases(BigInteger m BigInteger upper1 = max.min(BigInteger.valueOf(Long.MAX_VALUE)); if (lower1.compareTo(upper1) < 0) { cases.add( - Map.entry( + new TypedDataSupplier( "", - () -> BigInteger.valueOf(ESTestCase.randomLongBetween(lower1.longValue(), upper1.longValue())) + () -> BigInteger.valueOf(ESTestCase.randomLongBetween(lower1.longValue(), upper1.longValue())), + DataTypes.UNSIGNED_LONG ) ); } else if (lower1.compareTo(upper1) == 0) { - cases.add(Map.entry("", () -> lower1)); + cases.add(new TypedDataSupplier("", () -> lower1, DataTypes.UNSIGNED_LONG)); } // Big values, greater than Long.MAX_VALUE @@ -509,51 +523,66 @@ private static List>> ulongCases(BigInteger m BigInteger upper2 = 
max.min(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.valueOf(Integer.MAX_VALUE))); if (lower2.compareTo(upper2) < 0) { cases.add( - Map.entry( + new TypedDataSupplier( "", - () -> BigInteger.valueOf(ESTestCase.randomLongBetween(lower2.longValue(), upper2.longValue())) + () -> BigInteger.valueOf(ESTestCase.randomLongBetween(lower2.longValue(), upper2.longValue())), + DataTypes.UNSIGNED_LONG ) ); } else if (lower2.compareTo(upper2) == 0) { - cases.add(Map.entry("", () -> lower2)); + cases.add(new TypedDataSupplier("", () -> lower2, DataTypes.UNSIGNED_LONG)); } return cases; } - private static List>> doubleCases(double min, double max) { - List>> cases = new ArrayList<>(); + private static List doubleCases(double min, double max) { + List cases = new ArrayList<>(); // Zeros if (0d <= max && 0d >= min) { - cases.add(Map.entry("<0 double>", () -> 0.0d)); - cases.add(Map.entry("<-0 double>", () -> -0.0d)); + cases.add(new TypedDataSupplier("<0 double>", () -> 0.0d, DataTypes.DOUBLE)); + cases.add(new TypedDataSupplier("<-0 double>", () -> -0.0d, DataTypes.DOUBLE)); } // Positive small double double lower1 = Math.max(0d, min); double upper1 = Math.min(1d, max); if (lower1 < upper1) { - cases.add(Map.entry("", () -> ESTestCase.randomDoubleBetween(lower1, upper1, true))); + cases.add( + new TypedDataSupplier( + "", + () -> ESTestCase.randomDoubleBetween(lower1, upper1, true), + DataTypes.DOUBLE + ) + ); } else if (lower1 == upper1) { - cases.add(Map.entry("", () -> lower1)); + cases.add(new TypedDataSupplier("", () -> lower1, DataTypes.DOUBLE)); } // Negative small double double lower2 = Math.max(-1d, min); double upper2 = Math.min(0d, max); if (lower2 < upper2) { - cases.add(Map.entry("", () -> ESTestCase.randomDoubleBetween(lower2, upper2, true))); + cases.add( + new TypedDataSupplier( + "", + () -> ESTestCase.randomDoubleBetween(lower2, upper2, true), + DataTypes.DOUBLE + ) + ); } else if (lower2 == upper2) { - cases.add(Map.entry("", () -> lower2)); + cases.add(new 
TypedDataSupplier("", () -> lower2, DataTypes.DOUBLE)); } // Positive big double double lower3 = Math.max(1d, min); // start at 1 (inclusive) because the density of values between 0 and 1 is very high double upper3 = Math.min(Double.MAX_VALUE, max); if (lower3 < upper3) { - cases.add(Map.entry("", () -> ESTestCase.randomDoubleBetween(lower3, upper3, true))); + cases.add( + new TypedDataSupplier("", () -> ESTestCase.randomDoubleBetween(lower3, upper3, true), DataTypes.DOUBLE) + ); } else if (lower3 == upper3) { - cases.add(Map.entry("", () -> lower3)); + cases.add(new TypedDataSupplier("", () -> lower3, DataTypes.DOUBLE)); } // Negative big double @@ -561,48 +590,73 @@ private static List>> doubleCases(double min, double lower4 = Math.max(-Double.MAX_VALUE, min); double upper4 = Math.min(-1, max); // because again, the interval from -1 to 0 is very high density if (lower4 < upper4) { - cases.add(Map.entry("", () -> ESTestCase.randomDoubleBetween(lower4, upper4, true))); + cases.add( + new TypedDataSupplier("", () -> ESTestCase.randomDoubleBetween(lower4, upper4, true), DataTypes.DOUBLE) + ); } else if (lower4 == upper4) { - cases.add(Map.entry("", () -> lower4)); + cases.add(new TypedDataSupplier("", () -> lower4, DataTypes.DOUBLE)); } return cases; } - private static List>> booleanCases() { - return List.of(Map.entry("", () -> true), Map.entry("", () -> false)); + private static List booleanCases() { + return List.of( + new TypedDataSupplier("", () -> true, DataTypes.BOOLEAN), + new TypedDataSupplier("", () -> false, DataTypes.BOOLEAN) + ); } - private static List>> dateCases() { + private static List dateCases() { return List.of( - Map.entry("<1970-01-01T00:00:00Z>", () -> 0L), - Map.entry( + new TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataTypes.DATETIME), + new TypedDataSupplier( "", - () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11) // 1970-01-01T00:00:00Z - 2286-11-20T17:46:40Z + () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11), // 
1970-01-01T00:00:00Z - 2286-11-20T17:46:40Z + DataTypes.DATETIME ), - Map.entry( + new TypedDataSupplier( "", // 2286-11-20T17:46:40Z - +292278994-08-17T07:12:55.807Z - () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE) + () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE), + DataTypes.DATETIME ) ); } - private static List>> ipCases() { + private static List ipCases() { return List.of( - Map.entry("<127.0.0.1 ip>", () -> new BytesRef(InetAddressPoint.encode(InetAddresses.forString("127.0.0.1")))), - Map.entry("", () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(true)))), - Map.entry("", () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(false)))) + new TypedDataSupplier( + "<127.0.0.1 ip>", + () -> new BytesRef(InetAddressPoint.encode(InetAddresses.forString("127.0.0.1"))), + DataTypes.IP + ), + new TypedDataSupplier("", () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(true))), DataTypes.IP), + new TypedDataSupplier("", () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(false))), DataTypes.IP) ); } - private static List>> stringCases(String type) { - List>> result = new ArrayList<>(); - result.add(Map.entry("", () -> new BytesRef(""))); - result.add(Map.entry("", () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(1, 30)))); - result.add(Map.entry("", () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(300, 3000)))); - result.add(Map.entry("", () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(1, 30)))); + private static List stringCases(DataType type) { + List result = new ArrayList<>(); + result.add(new TypedDataSupplier("", () -> new BytesRef(""), type)); result.add( - Map.entry("", () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(300, 3000))) + new TypedDataSupplier("", () -> new BytesRef(ESTestCase.randomAlphaOfLengthBetween(1, 30)), type) + ); + result.add( + new TypedDataSupplier("", () -> new 
BytesRef(ESTestCase.randomAlphaOfLengthBetween(300, 3000)), type) + ); + result.add( + new TypedDataSupplier( + "", + () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(1, 30)), + type + ) + ); + result.add( + new TypedDataSupplier( + "", + () -> new BytesRef(ESTestCase.randomRealisticUnicodeOfLengthBetween(300, 3000)), + type + ) ); return result; } @@ -610,61 +664,27 @@ private static List>> stringCases(String type /** * Supplier test case data for {@link Version} fields. */ - public static List>> versionCases(String prefix) { + public static List versionCases(String prefix) { return List.of( - Map.entry("<" + prefix + "version major>", () -> new Version(Integer.toString(ESTestCase.between(0, 100))).toBytesRef()), - Map.entry( + new TypedDataSupplier( + "<" + prefix + "version major>", + () -> new Version(Integer.toString(ESTestCase.between(0, 100))).toBytesRef(), + DataTypes.VERSION + ), + new TypedDataSupplier( "<" + prefix + "version major.minor>", - () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef() + () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef(), + DataTypes.VERSION ), - Map.entry( + new TypedDataSupplier( "<" + prefix + "version major.minor.patch>", () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100) + "." 
+ ESTestCase.between(0, 100)) - .toBytesRef() + .toBytesRef(), + DataTypes.VERSION ) ); } - private static final Map>>> RANDOM_VALUE_SUPPLIERS = Map.ofEntries( - Map.entry( - DataTypes.DOUBLE, - List.of( - Map.entry("<0 double>", () -> 0.0d), - Map.entry("", () -> ESTestCase.randomDouble()), - Map.entry("", () -> -ESTestCase.randomDouble()), - Map.entry("", () -> ESTestCase.randomDoubleBetween(0, Double.MAX_VALUE, false)), - Map.entry("", () -> ESTestCase.randomDoubleBetween(Double.MIN_VALUE, 0 - Double.MIN_NORMAL, true)) - ) - ), - Map.entry( - DataTypes.LONG, - List.of( - Map.entry("<0 long>", () -> 0L), - Map.entry("", () -> ESTestCase.randomLongBetween(1, Long.MAX_VALUE)), - Map.entry("", () -> ESTestCase.randomLongBetween(Long.MIN_VALUE, -1)) - ) - ), - Map.entry( - DataTypes.INTEGER, - List.of( - Map.entry("<0 int>", () -> 0), - Map.entry("", () -> ESTestCase.between(1, Integer.MAX_VALUE)), - Map.entry("", () -> ESTestCase.between(Integer.MIN_VALUE, -1)) - ) - ), - Map.entry( - DataTypes.UNSIGNED_LONG, - List.of( - Map.entry("<0 unsigned long>", () -> BigInteger.ZERO), - Map.entry("", () -> BigInteger.valueOf(ESTestCase.randomLongBetween(1, Integer.MAX_VALUE))), - Map.entry( - "", - () -> BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.valueOf(ESTestCase.randomLongBetween(1, Integer.MAX_VALUE))) - ) - ) - ) - ); - private static String castToDoubleEvaluator(String original, DataType current) { if (current == DataTypes.DOUBLE) { return original; @@ -786,6 +806,13 @@ public TestCase withWarning(String warning) { } } + /** + * Holds a supplier for a data value, along with the type of that value and a name for generating test case names. This mostly + * exists because we can't generate random values from the test parameter generation functions, and instead need to return + * suppliers which generate the random values at test execution time. 
+ */ + public record TypedDataSupplier(String name, Supplier supplier, DataType type) {} + /** * Holds a data value and the intended parse type of that value * @param data - value to test against diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java index c84f244b08df8..fefa397f7c77f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java @@ -22,7 +22,6 @@ import java.util.ArrayList; import java.util.List; -import java.util.Map; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; @@ -51,9 +50,9 @@ public static Iterable parameters() { ); // But strings that are shaped like versions do parse to valid versions for (DataType inputType : EsqlDataTypes.types().stream().filter(EsqlDataTypes::isString).toList()) { - for (Map.Entry> versionGen : TestCaseSupplier.versionCases(inputType.typeName() + " ")) { - suppliers.add(new TestCaseSupplier(versionGen.getKey(), List.of(inputType), () -> { - BytesRef encodedVersion = (BytesRef) versionGen.getValue().get(); + for (TestCaseSupplier.TypedDataSupplier versionGen : TestCaseSupplier.versionCases(inputType.typeName() + " ")) { + suppliers.add(new TestCaseSupplier(versionGen.name(), List.of(inputType), () -> { + BytesRef encodedVersion = (BytesRef) versionGen.supplier().get(); TestCaseSupplier.TypedData typed = new TestCaseSupplier.TypedData( new BytesRef(new Version(encodedVersion).toString()), inputType, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java index bf1eeacc9ed06..12bc9c48827f5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java @@ -30,7 +30,8 @@ public static Iterable parameters() { "val", Math::acos, Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY + Double.POSITIVE_INFINITY, + List.of() ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java index 119683d2d94e1..7cba8e88940c6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java @@ -30,7 +30,8 @@ public static Iterable parameters() { "val", Math::asin, Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY + Double.POSITIVE_INFINITY, + List.of() ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Tests.java index 3f4de813679da..0a884a2311e86 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Tests.java @@ -25,7 +25,17 @@ public 
Atan2Tests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { - List suppliers = TestCaseSupplier.forBinaryCastingToDouble("Atan2Evaluator", "y", "x", Math::atan2); + List suppliers = TestCaseSupplier.forBinaryCastingToDouble( + "Atan2Evaluator", + "y", + "x", + Math::atan2, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + List.of() + ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java index eb448b9402eea..897d4b18c3092 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java @@ -30,7 +30,8 @@ public static Iterable parameters() { "val", Math::atan, Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY + Double.POSITIVE_INFINITY, + List.of() ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java index a024fbdb1d76f..c7b4570dab34f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java @@ -30,7 +30,8 @@ public static Iterable parameters() { "val", Math::cos, Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY + Double.POSITIVE_INFINITY, + 
List.of() ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java index 6d5393469422b..2a1e81b60a02f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java @@ -30,7 +30,8 @@ public static Iterable parameters() { "val", Math::cosh, Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY + Double.POSITIVE_INFINITY, + List.of() ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinTests.java index e9d0599842f98..788b506694d5e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinTests.java @@ -30,7 +30,8 @@ public static Iterable parameters() { "val", Math::sin, Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY + Double.POSITIVE_INFINITY, + List.of() ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java index 0f53ea76e8462..aad1e35a09da4 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java @@ -30,7 +30,8 @@ public static Iterable parameters() { "val", Math::sinh, Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY + Double.POSITIVE_INFINITY, + List.of() ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java index 7e2bd466ffc10..1d654873f828f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java @@ -30,7 +30,8 @@ public static Iterable parameters() { "val", Math::tan, Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY + Double.POSITIVE_INFINITY, + List.of() ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java index 1f6d5bd6e3b9f..a50fbfa642dd6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java @@ -30,7 +30,8 @@ public static Iterable parameters() { "val", Math::tanh, Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY + Double.POSITIVE_INFINITY, + List.of() ); return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } From 2072be90b28c5e0fd7dff1ac26cec0b448cb7b48 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 12 Sep 2023 10:05:29 -0400 Subject: [PATCH 016/114] Utilize optimized dot_product where possible when calculating vector magnitude (#99448) Lucene provides an optimized `dot_product` calculation for vectors. We should use that when calculating a vector's magnitude. --- .../mapper/vectors/DenseVectorFieldMapper.java | 11 +++-------- .../index/mapper/vectors/VectorEncoderDecoder.java | 8 ++------ .../script/field/vectors/DenseVector.java | 14 ++++---------- 3 files changed, 9 insertions(+), 24 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 0e4f871fbb8ca..bd9b9df68aff2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.fielddata.FieldDataContext; @@ -859,10 +860,7 @@ public Query createKnnQuery(byte[] queryVector, int numCands, Query filter, Floa } if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) { - float squaredMagnitude = 0.0f; - for (byte b : queryVector) { - squaredMagnitude += b * b; - } + float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector); elementType.checkVectorMagnitude(similarity, elementType.errorByteElementsAppender(queryVector), squaredMagnitude); } Query knnQuery = 
new KnnByteVectorQuery(name(), queryVector, numCands, filter); @@ -891,10 +889,7 @@ public Query createKnnQuery(float[] queryVector, int numCands, Query filter, Flo elementType.checkVectorBounds(queryVector); if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) { - float squaredMagnitude = 0.0f; - for (float e : queryVector) { - squaredMagnitude += e * e; - } + float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector); elementType.checkVectorMagnitude(similarity, elementType.errorFloatElementsAppender(queryVector), squaredMagnitude); } Query knnQuery = switch (elementType) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java index 381c1767edff3..e3285c4dc8644 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; import org.elasticsearch.index.IndexVersion; import java.nio.ByteBuffer; @@ -46,12 +47,7 @@ public static float decodeMagnitude(IndexVersion indexVersion, BytesRef vectorBR * Calculates vector magnitude */ private static float calculateMagnitude(float[] decodedVector) { - double magnitude = 0.0f; - for (int i = 0; i < decodedVector.length; i++) { - magnitude += decodedVector[i] * decodedVector[i]; - } - magnitude = Math.sqrt(magnitude); - return (float) magnitude; + return (float) Math.sqrt(VectorUtil.dotProduct(decodedVector, decodedVector)); } public static float getMagnitude(IndexVersion indexVersion, BytesRef vectorBR, float[] decodedVector) { diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/DenseVector.java 
b/server/src/main/java/org/elasticsearch/script/field/vectors/DenseVector.java index 84649d9954b6a..79a4c3fa1b2ee 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/DenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/DenseVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.script.field.vectors; +import org.apache.lucene.util.VectorUtil; + import java.util.List; /** @@ -151,11 +153,7 @@ default double cosineSimilarity(Object queryVector) { int size(); static float getMagnitude(byte[] vector) { - int mag = 0; - for (int elem : vector) { - mag += elem * elem; - } - return (float) Math.sqrt(mag); + return (float) Math.sqrt(VectorUtil.dotProduct(vector, vector)); } static float getMagnitude(byte[] vector, int dims) { @@ -170,11 +168,7 @@ static float getMagnitude(byte[] vector, int dims) { } static float getMagnitude(float[] vector) { - double mag = 0.0f; - for (float elem : vector) { - mag += elem * elem; - } - return (float) Math.sqrt(mag); + return (float) Math.sqrt(VectorUtil.dotProduct(vector, vector)); } static float getMagnitude(List vector) { From b5e06da143bebe9aa0ecba8a7ddddccb3be21c7c Mon Sep 17 00:00:00 2001 From: William Brafford Date: Tue, 12 Sep 2023 11:16:55 -0400 Subject: [PATCH 017/114] Add mappings versions to CompatibilityVersions (#99307) CompatibilityVersions now holds a map of system index names to their mappings versions, alongside the transport version. We also add mapping versions to the "minimum version barrier": if a node has a system index whose version is below the cluster mappings version for that system index, it is not allowed to join the cluster. 
--- .../allocation/AllocationBenchmark.java | 3 +- .../cluster/ClusterStateDiffIT.java | 5 +- .../org/elasticsearch/TransportVersions.java | 1 + .../elasticsearch/cluster/ClusterState.java | 16 +- .../cluster/coordination/JoinRequest.java | 4 +- .../coordination/NodeJoinExecutor.java | 2 +- .../version/CompatibilityVersions.java | 90 +++++++++-- .../indices/SystemIndexDescriptor.java | 33 +++- .../java/org/elasticsearch/node/Node.java | 5 +- .../reroute/ClusterRerouteResponseTests.java | 3 +- .../cluster/ClusterStateTests.java | 33 +++- .../cluster/coordination/MessagesTests.java | 7 +- .../coordination/NodeJoinExecutorTests.java | 31 ---- .../TransportVersionsFixupListenerTests.java | 17 ++- .../version/CompatibilityVersionsTests.java | 141 +++++++++++++++++- .../indices/SystemIndexDescriptorTests.java | 12 ++ .../indices/cluster/ClusterStateChanges.java | 2 +- .../version/CompatibilityVersionsUtils.java | 23 ++- .../ml/utils/TransportVersionUtilsTests.java | 13 +- 19 files changed, 354 insertions(+), 87 deletions(-) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/AllocationBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/AllocationBenchmark.java index 56002554cb140..9daa5c24f3bd4 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/AllocationBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/AllocationBenchmark.java @@ -145,7 +145,8 @@ public void setUp() throws Exception { for (int i = 1; i <= numNodes; i++) { String id = "node" + i; nb.add(Allocators.newNode(id, Collections.singletonMap("tag", "tag_" + (i % numTags)))); - compatibilityVersions.put(id, new CompatibilityVersions(TransportVersion.current())); + // system index mappings versions not needed here, so we use Map.of() + compatibilityVersions.put(id, new CompatibilityVersions(TransportVersion.current(), Map.of())); } initialClusterState = 
ClusterState.builder(ClusterName.DEFAULT) .metadata(metadata) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index f561cc50b4f19..373213be479a7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.version.CompatibilityVersions; +import org.elasticsearch.cluster.version.CompatibilityVersionsUtils; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -238,7 +239,7 @@ private ClusterState.Builder randomNodes(ClusterState clusterState) { versions.remove(nodeId); if (randomBoolean()) { nodes.add(randomNode(nodeId)); - versions.put(nodeId, new CompatibilityVersions(TransportVersionUtils.randomVersion(random()))); + versions.put(nodeId, CompatibilityVersionsUtils.fakeSystemIndicesRandom()); } } } @@ -246,7 +247,7 @@ private ClusterState.Builder randomNodes(ClusterState clusterState) { for (int i = 0; i < additionalNodeCount; i++) { String id = "node-" + randomAlphaOfLength(10); nodes.add(randomNode(id)); - versions.put(id, new CompatibilityVersions(TransportVersionUtils.randomVersion(random()))); + versions.put(id, CompatibilityVersionsUtils.fakeSystemIndicesRandom()); } return ClusterState.builder(clusterState).nodes(nodes).compatibilityVersions(versions); diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 6fb02a3ca05d2..3bc2bd7ce8499 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ 
b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -136,6 +136,7 @@ static TransportVersion def(int id, String uniqueId) { public static final TransportVersion V_8_500_070 = def(8_500_070, "6BADC9CD-3C9D-4381-8BD9-B305CAA93F86"); public static final TransportVersion V_8_500_071 = def(8_500_071, "a86dfc08-3026-4f01-90ef-6d6de003e217"); public static final TransportVersion V_8_500_072 = def(8_500_072, "e2df7d80-7b74-4afd-9734-aee0fc256025"); + public static final TransportVersion V_8_500_073 = def(8_500_073, "9128e16a-e4f7-41c4-b04f-842955bfc1b4"); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index 3f9e7e4d8d9ae..603a93ab11f79 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -45,6 +45,7 @@ import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContent; @@ -133,12 +134,12 @@ default boolean isPrivate() { new DiffableUtils.NonDiffableValueSerializer<>() { @Override public void write(CompatibilityVersions value, StreamOutput out) throws IOException { - TransportVersion.writeVersion(value.transportVersion(), out); + value.writeTo(out); } @Override public CompatibilityVersions read(StreamInput in, String key) throws IOException { - return new CompatibilityVersions(TransportVersion.readVersion(in)); + return CompatibilityVersions.readVersion(in); } }; @@ -222,8 +223,8 @@ public ClusterState( this.routingNodes = routingNodes; assert assertConsistentRoutingNodes(routingTable, nodes, 
routingNodes); this.minVersions = blocks.hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK) - ? new CompatibilityVersions(TransportVersions.MINIMUM_COMPATIBLE) - : CompatibilityVersions.minimumVersions(compatibilityVersions); + ? new CompatibilityVersions(TransportVersions.MINIMUM_COMPATIBLE, Map.of()) // empty map because cluster state is unknown + : CompatibilityVersions.minimumVersions(compatibilityVersions.values()); } private static boolean assertConsistentRoutingNodes( @@ -287,6 +288,10 @@ public TransportVersion getMinTransportVersion() { return this.minVersions.transportVersion(); } + public Map getMinSystemIndexMappingVersions() { + return this.minVersions.systemIndexMappingsVersion(); + } + public Metadata metadata() { return this.metadata; } @@ -773,7 +778,8 @@ public DiscoveryNodes nodes() { } public Builder putTransportVersion(String nodeId, TransportVersion transportVersion) { - compatibilityVersions.put(nodeId, new CompatibilityVersions(Objects.requireNonNull(transportVersion, nodeId))); + // TODO[wrb]: system index mappings versions will be added in a followup + compatibilityVersions.put(nodeId, new CompatibilityVersions(Objects.requireNonNull(transportVersion, nodeId), Map.of())); return this; } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java index 3d70fca6723af..d8958f75c7aa5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java @@ -16,6 +16,7 @@ import org.elasticsearch.transport.TransportRequest; import java.io.IOException; +import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -66,7 +67,8 @@ public JoinRequest(StreamInput in) throws IOException { compatibilityVersions = CompatibilityVersions.readVersion(in); } else { // there's a 1-1 mapping from Version to TransportVersion before 8.8.0 - 
compatibilityVersions = new CompatibilityVersions(TransportVersion.fromId(sourceNode.getVersion().id)); + // no known mapping versions here + compatibilityVersions = new CompatibilityVersions(TransportVersion.fromId(sourceNode.getVersion().id), Map.of()); } minimumTerm = in.readLong(); optionalJoin = Optional.ofNullable(in.readOptionalWriteable(Join::new)); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java index 55cf6ea8a398d..dd52f20c7355a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java @@ -143,7 +143,7 @@ public ClusterState execute(BatchExecutionContext batchExecutionContex CompatibilityVersions compatibilityVersions = nodeJoinTask.compatibilityVersions(); if (enforceVersionBarrier) { ensureVersionBarrier(node.getVersion(), minClusterNodeVersion); - ensureTransportVersionBarrier(compatibilityVersions, compatibilityVersionsMap.values()); + CompatibilityVersions.ensureVersionsCompatibility(compatibilityVersions, compatibilityVersionsMap.values()); } blockForbiddenVersions(compatibilityVersions.transportVersion()); ensureNodesCompatibility(node.getVersion(), minClusterNodeVersion, maxClusterNodeVersion); diff --git a/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java b/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java index 5e7692e645d6a..a9c5298a4325e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java +++ b/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java @@ -13,24 +13,34 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import 
org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Collection; import java.util.Comparator; +import java.util.HashMap; import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Wraps component version numbers for cluster state * *

Cluster state will need to carry version information for different independently versioned components. - * This wrapper lets us wrap these versions one level below {@link org.elasticsearch.cluster.ClusterState}. - * It's similar to {@link org.elasticsearch.cluster.node.VersionInformation}, but this class is meant to - * be constructed during node startup and hold values from plugins as well. + * This wrapper lets us wrap these versions one level below {@link org.elasticsearch.cluster.ClusterState}. It's similar to + * {@link org.elasticsearch.cluster.node.VersionInformation}, but this class is meant to be constructed during node startup and hold values + * from plugins as well. * - * @param transportVersion A transport version, usually a minimum compatible one for a node. + * @param transportVersion A transport version, usually a minimum compatible one for a node. + * @param systemIndexMappingsVersion A map of system index names to versions for their mappings. */ -public record CompatibilityVersions(TransportVersion transportVersion) implements Writeable, ToXContentFragment { +public record CompatibilityVersions( + TransportVersion transportVersion, + Map systemIndexMappingsVersion +) implements Writeable, ToXContentFragment { /** * Constructs a VersionWrapper collecting all the minimum versions from the values of the map. 
@@ -38,24 +48,73 @@ public record CompatibilityVersions(TransportVersion transportVersion) implement * @param compatibilityVersions A map of strings (typically node identifiers) and versions wrappers * @return Minimum versions for the cluster */ - public static CompatibilityVersions minimumVersions(Map compatibilityVersions) { - return new CompatibilityVersions( - compatibilityVersions.values() - .stream() - .map(CompatibilityVersions::transportVersion) - .min(Comparator.naturalOrder()) - // In practice transportVersions is always nonempty (except in tests) but use a conservative default anyway: - .orElse(TransportVersions.MINIMUM_COMPATIBLE) - ); + public static CompatibilityVersions minimumVersions(Collection compatibilityVersions) { + TransportVersion minimumTransport = compatibilityVersions.stream() + .map(CompatibilityVersions::transportVersion) + .min(Comparator.naturalOrder()) + // In practice transportVersions is always nonempty (except in tests) but use a conservative default anyway: + .orElse(TransportVersions.MINIMUM_COMPATIBLE); + + Map minimumMappingsVersions = compatibilityVersions.stream() + .flatMap(mv -> mv.systemIndexMappingsVersion().entrySet().stream()) + .collect( + Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (v1, v2) -> Stream.of(v1, v2).min(Comparator.naturalOrder()).get()) + ); + + return new CompatibilityVersions(minimumTransport, minimumMappingsVersions); + } + + public static void ensureVersionsCompatibility(CompatibilityVersions candidate, Collection existing) { + CompatibilityVersions minimumClusterVersions = minimumVersions(existing); + + if (candidate.transportVersion().before(minimumClusterVersions.transportVersion())) { + throw new IllegalStateException( + "node with transport version [" + + candidate.transportVersion() + + "] may not join a cluster with minimum transport version [" + + minimumClusterVersions.transportVersion() + + "]" + ); + } + + Map candidateInvalid = new HashMap<>(); + Map existingInvalid = new 
HashMap<>(); + for (Map.Entry candidates : candidate.systemIndexMappingsVersion().entrySet()) { + var mapping = minimumClusterVersions.systemIndexMappingsVersion().get(candidates.getKey()); + if (Objects.nonNull(mapping) && mapping.version() > candidates.getValue().version()) { + candidateInvalid.put(candidates.getKey(), candidates.getValue()); + existingInvalid.put(candidates.getKey(), minimumClusterVersions.systemIndexMappingsVersion().get(candidates.getKey())); + } + } + if (candidateInvalid.isEmpty() == false) { + throw new IllegalStateException( + "node with system index mappings versions [" + + candidateInvalid + + "] may not join a cluster with minimum system index mappings versions [" + + existingInvalid + + "]" + ); + } } public static CompatibilityVersions readVersion(StreamInput in) throws IOException { - return new CompatibilityVersions(TransportVersion.readVersion(in)); + TransportVersion transportVersion = TransportVersion.readVersion(in); + + Map mappingsVersions = Map.of(); + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_073)) { + mappingsVersions = in.readMap(SystemIndexDescriptor.MappingsVersion::new); + } + + return new CompatibilityVersions(transportVersion, mappingsVersions); } @Override public void writeTo(StreamOutput out) throws IOException { TransportVersion.writeVersion(this.transportVersion(), out); + + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_073)) { + out.writeMap(this.systemIndexMappingsVersion(), (o, v) -> v.writeTo(o)); + } } /** @@ -69,6 +128,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("transport_version", this.transportVersion().toString()); + builder.field("mappings_versions", this.systemIndexMappingsVersion); return builder; } } diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java 
b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index 4d37cea8bcf98..d6441a2920f43 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -18,15 +18,20 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.SystemIndexMetadataUpgradeService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.RegExp; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -649,7 +654,33 @@ public boolean isInternal() { * The hash is a hash of the system index descriptor's mappings so that we can warn * in case of inconsistencies across nodes. 
*/ - public record MappingsVersion(int version, int hash) {}; + public record MappingsVersion(int version, int hash) implements Writeable, ToXContent, Comparable { + + public MappingsVersion(StreamInput in) throws IOException { + this(in.readVInt(), in.readInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(version); + out.writeInt(hash); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("version", version); + builder.field("hash", hash); + builder.endObject(); + return builder; + } + + @Override + public int compareTo(MappingsVersion o) { + Objects.requireNonNull(o, "Cannot compare null MappingsVersion"); + return Integer.compare(this.version, o.version); + } + } /** * Provides a fluent API for building a {@link SystemIndexDescriptor}. Validation still happens in that class. diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 8a6cb1f82bc05..111dc5ec72165 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -622,7 +622,10 @@ protected Node( resourcesToClose.add(circuitBreakerService); modules.add(new GatewayModule()); - CompatibilityVersions compatibilityVersions = new CompatibilityVersions(TransportVersion.current()); + CompatibilityVersions compatibilityVersions = new CompatibilityVersions( + TransportVersion.current(), + systemIndices.getMappingsVersions() + ); PageCacheRecycler pageCacheRecycler = createPageCacheRecycler(settings); BigArrays bigArrays = createBigArrays(pageCacheRecycler, circuitBreakerService); modules.add(settingsModule); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java 
index 80ff2168f6344..b5ab63140e433 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java @@ -129,7 +129,8 @@ public void testToXContentWithDeprecatedClusterState() { "nodes_versions": [ { "node_id": "node0", - "transport_version": "8000099" + "transport_version": "8000099", + "mappings_versions": {} } ], "metadata": { diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java index b96a362b2a867..243d30ccf811f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Iterators; @@ -44,6 +45,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -211,7 +213,13 @@ public void testToXContent() throws IOException { "nodes_versions" : [ { "node_id" : "nodeId1", - "transport_version" : "%s" + "transport_version" : "%s", + "mappings_versions" : { + ".tasks" : { + "version" : 1, + "hash" : 1 + } + } } ], "metadata": { @@ -466,7 +474,13 @@ public void testToXContent_FlatSettingTrue_ReduceMappingFalse() throws IOExcepti "nodes_versions" : [ { "node_id" : "nodeId1", - 
"transport_version" : "%s" + "transport_version" : "%s", + "mappings_versions" : { + ".tasks" : { + "version" : 1, + "hash" : 1 + } + } } ], "metadata" : { @@ -717,7 +731,13 @@ public void testToXContent_FlatSettingFalse_ReduceMappingTrue() throws IOExcepti "nodes_versions" : [ { "node_id" : "nodeId1", - "transport_version" : "%s" + "transport_version" : "%s", + "mappings_versions" : { + ".tasks" : { + "version" : 1, + "hash" : 1 + } + } } ], "metadata" : { @@ -1029,7 +1049,12 @@ private ClusterState buildClusterState() throws IOException { .add(DiscoveryNodeUtils.create("nodeId1", new TransportAddress(InetAddress.getByName("127.0.0.1"), 111))) .build() ) - .putTransportVersion("nodeId1", TransportVersion.current()) + .compatibilityVersions( + Map.of( + "nodeId1", + new CompatibilityVersions(TransportVersion.current(), Map.of(".tasks", new SystemIndexDescriptor.MappingsVersion(1, 1))) + ) + ) .blocks( ClusterBlocks.builder() .addGlobalBlock( diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java index 676914ec0bed2..f779d5ea56dfa 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java @@ -11,12 +11,14 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.version.CompatibilityVersions; +import org.elasticsearch.cluster.version.CompatibilityVersionsUtils; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; import org.elasticsearch.test.EqualsHashCodeTestUtils.CopyFunction; import org.elasticsearch.test.TransportVersionUtils; +import java.util.Map; import java.util.Optional; import java.util.Set; @@ -243,7 +245,7 @@ public void 
testJoinRequestEqualsHashCodeSerialization() { ); JoinRequest initialJoinRequest = new JoinRequest( initialJoin.getSourceNode(), - new CompatibilityVersions(TransportVersionUtils.randomVersion()), + CompatibilityVersionsUtils.fakeSystemIndicesRandom(), randomNonNegativeLong(), randomBoolean() ? Optional.empty() : Optional.of(initialJoin) ); @@ -263,7 +265,8 @@ public void testJoinRequestEqualsHashCodeSerialization() { return new JoinRequest( joinRequest.getSourceNode(), new CompatibilityVersions( - TransportVersionUtils.randomVersion(Set.of(joinRequest.getCompatibilityVersions().transportVersion())) + TransportVersionUtils.randomVersion(Set.of(joinRequest.getCompatibilityVersions().transportVersion())), + Map.of() ), joinRequest.getMinimumTerm(), joinRequest.getOptionalJoin() diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java index 4807d5ee984ca..19aa035817a01 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.cluster.coordination; import org.apache.logging.log4j.Level; -import org.elasticsearch.TransportVersion; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; @@ -28,7 +27,6 @@ import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; -import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.cluster.version.CompatibilityVersionsUtils; import org.elasticsearch.common.Priority; import org.elasticsearch.common.ReferenceDocs; @@ -38,7 +36,6 @@ import 
org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -48,7 +45,6 @@ import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; -import java.util.stream.IntStream; import java.util.stream.Stream; import static org.elasticsearch.cluster.metadata.DesiredNodesTestCase.assertDesiredNodesStatusIsCorrect; @@ -157,33 +153,6 @@ public void testPreventJoinClusterWithUnsupportedNodeVersions() { } } - public void testPreventJoinClusterWithUnsupportedTransportVersion() { - List versions = IntStream.range(0, randomIntBetween(2, 10)) - .mapToObj(i -> TransportVersionUtils.randomCompatibleVersion(random())) - .toList(); - TransportVersion min = Collections.min(versions); - List compatibilityVersions = versions.stream().map(CompatibilityVersions::new).toList(); - - // should not throw - NodeJoinExecutor.ensureTransportVersionBarrier( - new CompatibilityVersions(TransportVersionUtils.randomVersionBetween(random(), min, TransportVersion.current())), - compatibilityVersions - ); - expectThrows( - IllegalStateException.class, - () -> NodeJoinExecutor.ensureTransportVersionBarrier( - new CompatibilityVersions( - TransportVersionUtils.randomVersionBetween( - random(), - TransportVersionUtils.getFirstVersion(), - TransportVersionUtils.getPreviousVersion(min) - ) - ), - compatibilityVersions - ) - ); - } - public void testSuccess() { Settings.builder().build(); Metadata.Builder metaBuilder = Metadata.builder(); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java index 3d488b6d55bff..f213d7e366ce4 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java @@ -112,7 +112,7 @@ public void testNothingFixedWhenNothingToInfer() { ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) .nodes(node(Version.V_8_8_0)) - .compatibilityVersions(versions(new CompatibilityVersions(TransportVersions.V_8_8_0))) + .compatibilityVersions(versions(new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of()))) .build(); TransportVersionsFixupListener listeners = new TransportVersionsFixupListener(taskQueue, client, null, null); @@ -127,7 +127,7 @@ public void testNothingFixedWhenOnNextVersion() { ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) .nodes(node(NEXT_VERSION)) - .compatibilityVersions(versions(new CompatibilityVersions(NEXT_TRANSPORT_VERSION))) + .compatibilityVersions(versions(new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of()))) .build(); TransportVersionsFixupListener listeners = new TransportVersionsFixupListener(taskQueue, client, null, null); @@ -143,7 +143,10 @@ public void testNothingFixedWhenOnPreviousVersion() { ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) .nodes(node(Version.V_8_7_0, Version.V_8_8_0)) .compatibilityVersions( - Maps.transformValues(versions(TransportVersions.V_8_7_0, TransportVersions.V_8_8_0), CompatibilityVersions::new) + Maps.transformValues( + versions(TransportVersions.V_8_7_0, TransportVersions.V_8_8_0), + transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) + ) ) .build(); @@ -163,7 +166,7 @@ public void testVersionsAreFixed() { .compatibilityVersions( Maps.transformValues( versions(NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0, TransportVersions.V_8_8_0), - CompatibilityVersions::new + transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) ) ) .build(); @@ -192,7 +195,7 @@ 
public void testConcurrentChangesDoNotOverlap() { .compatibilityVersions( Maps.transformValues( versions(NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0, TransportVersions.V_8_8_0), - CompatibilityVersions::new + transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) ) ) .build(); @@ -207,7 +210,7 @@ public void testConcurrentChangesDoNotOverlap() { .compatibilityVersions( Maps.transformValues( versions(NEXT_TRANSPORT_VERSION, NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0), - CompatibilityVersions::new + transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) ) ) .build(); @@ -228,7 +231,7 @@ public void testFailedRequestsAreRetried() { .compatibilityVersions( Maps.transformValues( versions(NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0, TransportVersions.V_8_8_0), - CompatibilityVersions::new + transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) ) ) .build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java b/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java index 0391cbf83608c..b3b598f2bd38c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java @@ -10,21 +10,75 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; +import java.util.Collections; +import java.util.List; import java.util.Map; +import java.util.stream.IntStream; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class CompatibilityVersionsTests extends ESTestCase { - public void testMinimumVersions() { + public void testEmptyVersionsList() { assertThat( - 
CompatibilityVersions.minimumVersions(Map.of()), - equalTo(new CompatibilityVersions(TransportVersions.MINIMUM_COMPATIBLE)) + CompatibilityVersions.minimumVersions(List.of()), + equalTo(new CompatibilityVersions(TransportVersions.MINIMUM_COMPATIBLE, Map.of())) ); + } + + public void testMinimumTransportVersions() { + TransportVersion version1 = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_COMPATIBLE, true); + TransportVersion version2 = TransportVersionUtils.randomVersionBetween( + random(), + TransportVersionUtils.getNextVersion(version1, true), + TransportVersion.current() + ); + + CompatibilityVersions compatibilityVersions1 = new CompatibilityVersions(version1, Map.of()); + CompatibilityVersions compatibilityVersions2 = new CompatibilityVersions(version2, Map.of()); + + List versions = List.of(compatibilityVersions1, compatibilityVersions2); + + assertThat(CompatibilityVersions.minimumVersions(versions), equalTo(compatibilityVersions1)); + } + + public void testMinimumMappingsVersions() { + SystemIndexDescriptor.MappingsVersion v1 = new SystemIndexDescriptor.MappingsVersion(1, 1); + SystemIndexDescriptor.MappingsVersion v2 = new SystemIndexDescriptor.MappingsVersion(2, 2); + SystemIndexDescriptor.MappingsVersion v3 = new SystemIndexDescriptor.MappingsVersion(3, 3); + Map mappings1 = Map.of(".system-index-1", v3, ".system-index-2", v1); + Map mappings2 = Map.of(".system-index-1", v2, ".system-index-2", v2); + Map mappings3 = Map.of(".system-index-3", v1); + + CompatibilityVersions compatibilityVersions1 = new CompatibilityVersions(TransportVersion.current(), mappings1); + CompatibilityVersions compatibilityVersions2 = new CompatibilityVersions(TransportVersion.current(), mappings2); + CompatibilityVersions compatibilityVersions3 = new CompatibilityVersions(TransportVersion.current(), mappings3); + + List versions = List.of(compatibilityVersions1, compatibilityVersions2, compatibilityVersions3); + + assertThat( + 
CompatibilityVersions.minimumVersions(versions), + equalTo( + new CompatibilityVersions( + TransportVersion.current(), + Map.of(".system-index-1", v2, ".system-index-2", v1, ".system-index-3", v1) + ) + ) + ); + } + /** + * By design, all versions should increase monotonically through releases, so we shouldn't have a situation + * where the minimum transport version is in one CompatibilityVersions object and a minimum system + * index is in another. However, the minimumVersions method we're testing will handle that situation without + * complaint. + */ + public void testMinimumsAreMerged() { TransportVersion version1 = TransportVersionUtils.getNextVersion(TransportVersions.MINIMUM_COMPATIBLE, true); TransportVersion version2 = TransportVersionUtils.randomVersionBetween( random(), @@ -32,11 +86,84 @@ public void testMinimumVersions() { TransportVersion.current() ); - CompatibilityVersions compatibilityVersions1 = new CompatibilityVersions(version1); - CompatibilityVersions compatibilityVersions2 = new CompatibilityVersions(version2); + SystemIndexDescriptor.MappingsVersion v1 = new SystemIndexDescriptor.MappingsVersion(1, 1); + SystemIndexDescriptor.MappingsVersion v2 = new SystemIndexDescriptor.MappingsVersion(2, 2); + Map mappings1 = Map.of(".system-index-1", v2); + Map mappings2 = Map.of(".system-index-1", v1); + + CompatibilityVersions compatibilityVersions1 = new CompatibilityVersions(version1, mappings1); + CompatibilityVersions compatibilityVersions2 = new CompatibilityVersions(version2, mappings2); + + List versions = List.of(compatibilityVersions1, compatibilityVersions2); + + assertThat(CompatibilityVersions.minimumVersions(versions), equalTo(new CompatibilityVersions(version1, mappings2))); + } + + public void testPreventJoinClusterWithUnsupportedTransportVersion() { + List transportVersions = IntStream.range(0, randomIntBetween(2, 10)) + .mapToObj(i -> TransportVersionUtils.randomCompatibleVersion(random())) + .toList(); + TransportVersion min = 
Collections.min(transportVersions); + List compatibilityVersions = transportVersions.stream() + .map(transportVersion -> new CompatibilityVersions(transportVersion, Map.of())) + .toList(); + + // should not throw + CompatibilityVersions.ensureVersionsCompatibility( + new CompatibilityVersions(TransportVersionUtils.randomVersionBetween(random(), min, TransportVersion.current()), Map.of()), + compatibilityVersions + ); + + IllegalStateException e = expectThrows( + IllegalStateException.class, + () -> CompatibilityVersions.ensureVersionsCompatibility( + new CompatibilityVersions( + TransportVersionUtils.randomVersionBetween( + random(), + TransportVersionUtils.getFirstVersion(), + TransportVersionUtils.getPreviousVersion(min) + ), + Map.of() + ), + compatibilityVersions + ) + ); + assertThat(e.getMessage(), containsString("may not join a cluster with minimum transport version")); + } + + public void testPreventJoinClusterWithUnsupportedMappingsVersion() { + List compatibilityVersions = IntStream.range(0, randomIntBetween(2, 10)) + .mapToObj( + i -> new CompatibilityVersions( + TransportVersion.current(), + Map.of(".system-index", new SystemIndexDescriptor.MappingsVersion(randomIntBetween(2, 10), -1)) + ) + ) + .toList(); + int min = compatibilityVersions.stream() + .mapToInt(v -> v.systemIndexMappingsVersion().get(".system-index").version()) + .min() + .orElse(2); - Map versionsMap = Map.of("node1", compatibilityVersions1, "node2", compatibilityVersions2); + // should not throw + CompatibilityVersions.ensureVersionsCompatibility( + new CompatibilityVersions( + TransportVersion.current(), + Map.of(".system-index", new SystemIndexDescriptor.MappingsVersion(min, -1)) + ), + compatibilityVersions + ); - assertThat(CompatibilityVersions.minimumVersions(versionsMap), equalTo(compatibilityVersions1)); + IllegalStateException e = expectThrows( + IllegalStateException.class, + () -> CompatibilityVersions.ensureVersionsCompatibility( + new CompatibilityVersions( + 
TransportVersion.current(), + Map.of(".system-index", new SystemIndexDescriptor.MappingsVersion(randomIntBetween(1, min - 1), -1)) + ), + compatibilityVersions + ) + ); + assertThat(e.getMessage(), containsString("may not join a cluster with minimum system index mappings versions")); } } diff --git a/server/src/test/java/org/elasticsearch/indices/SystemIndexDescriptorTests.java b/server/src/test/java/org/elasticsearch/indices/SystemIndexDescriptorTests.java index 7a651dea0c878..1a372de6129a3 100644 --- a/server/src/test/java/org/elasticsearch/indices/SystemIndexDescriptorTests.java +++ b/server/src/test/java/org/elasticsearch/indices/SystemIndexDescriptorTests.java @@ -386,6 +386,18 @@ public void testNegativeMappingsVersion() { assertThat(e.getMessage(), equalTo("The mappings version must not be negative")); } + public void testMappingsVersionCompareTo() { + SystemIndexDescriptor.MappingsVersion mv1 = new SystemIndexDescriptor.MappingsVersion(1, randomInt(20)); + SystemIndexDescriptor.MappingsVersion mv2 = new SystemIndexDescriptor.MappingsVersion(2, randomInt(20)); + + NullPointerException e = expectThrows(NullPointerException.class, () -> mv1.compareTo(null)); + assertThat(e.getMessage(), equalTo("Cannot compare null MappingsVersion")); + + assertThat(mv1.compareTo(mv2), equalTo(-1)); + assertThat(mv1.compareTo(mv1), equalTo(0)); + assertThat(mv2.compareTo(mv1), equalTo(1)); + } + public void testHashesIgnoreMappingMetadata() { String mappingFormatString = """ { diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index b0ad7d333d172..e547a736df034 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -422,7 +422,7 @@ public ClusterState joinNodesAndBecomeMaster(ClusterState clusterState, List new 
JoinTask.NodeJoinTask( node, - new CompatibilityVersions(transportVersion), + new CompatibilityVersions(transportVersion, Map.of()), DUMMY_REASON, ActionListener.running(() -> { throw new AssertionError("should not complete publication"); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersionsUtils.java b/test/framework/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersionsUtils.java index 80815dffa4a30..b8949cb92da71 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersionsUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersionsUtils.java @@ -9,8 +9,13 @@ package org.elasticsearch.cluster.version; import org.elasticsearch.TransportVersion; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; +import java.util.Map; + public class CompatibilityVersionsUtils { /** @@ -23,7 +28,7 @@ public class CompatibilityVersionsUtils { * @return Compatibility versions known at compile time. */ public static CompatibilityVersions staticCurrent() { - return new CompatibilityVersions(TransportVersion.current()); + return new CompatibilityVersions(TransportVersion.current(), Map.of()); } /** @@ -34,6 +39,20 @@ public static CompatibilityVersions staticCurrent() { * @return Random valid compatibility versions */ public static CompatibilityVersions staticRandom() { - return new CompatibilityVersions(TransportVersionUtils.randomVersion()); + return new CompatibilityVersions(TransportVersionUtils.randomVersion(), Map.of()); + } + + public static CompatibilityVersions fakeSystemIndicesRandom() { + return new CompatibilityVersions( + TransportVersionUtils.randomVersion(), + ESTestCase.randomMap( + 0, + 3, + () -> Tuple.tuple( + "." 
+ ESTestCase.randomAlphaOfLength(5), + new SystemIndexDescriptor.MappingsVersion(ESTestCase.randomInt(20), ESTestCase.randomInt()) + ) + ) + ); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java index 87743158995d4..c85fda5eee4b3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java @@ -23,13 +23,13 @@ public class TransportVersionUtilsTests extends ESTestCase { private static final Map transportVersions = Map.of( "Alfredo", - new CompatibilityVersions(TransportVersions.V_7_0_0), + new CompatibilityVersions(TransportVersions.V_7_0_0, Map.of()), "Bertram", - new CompatibilityVersions(TransportVersions.V_7_0_1), + new CompatibilityVersions(TransportVersions.V_7_0_1, Map.of()), "Charles", - new CompatibilityVersions(TransportVersions.V_8_500_020), + new CompatibilityVersions(TransportVersions.V_8_500_020, Map.of()), "Dominic", - new CompatibilityVersions(TransportVersions.V_8_0_0) + new CompatibilityVersions(TransportVersions.V_8_0_0, Map.of()) ); private static final ClusterState state = new ClusterState( @@ -53,7 +53,10 @@ public void testGetMinTransportVersion() { public void testIsMinTransformVersionSameAsCurrent() { assertThat(TransportVersionUtils.isMinTransportVersionSameAsCurrent(state), equalTo(false)); - Map transportVersions1 = Map.of("Eugene", new CompatibilityVersions(TransportVersion.current())); + Map transportVersions1 = Map.of( + "Eugene", + new CompatibilityVersions(TransportVersion.current(), Map.of()) + ); ClusterState state1 = new ClusterState( new ClusterName("harry"), From 0d8a1975a96355e0d7b194604b612209f810173d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 12 Sep 2023 11:46:09 -0400 Subject: [PATCH 018/114] ESQL: Fix test 
for unsigned long (#99441) We were generating negative values which made the tests confused. --- .../esql/functions/types/to_string.asciidoc | 1 + .../org/elasticsearch/test/ESTestCase.java | 24 ++++++++++++++++++ .../test/test/ESTestCaseTests.java | 24 ++++++++++++++++++ .../expression/function/TestCaseSupplier.java | 8 +++--- .../scalar/convert/ToStringTests.java | 25 ++++++++----------- .../function/scalar/math/FloorTests.java | 4 +-- .../function/scalar/math/Log10Tests.java | 4 +-- .../function/scalar/math/SqrtTests.java | 4 +-- 8 files changed, 66 insertions(+), 28 deletions(-) diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index c4c0129c32d53..b8fcd4477aa70 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -9,5 +9,6 @@ ip | keyword keyword | keyword long | keyword text | keyword +unsigned_long | keyword version | keyword |=== diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 0b917c90b69eb..bdf3a1e8b5018 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -750,6 +750,30 @@ public static long randomLongBetween(long min, long max) { return RandomNumbers.randomLongBetween(random(), min, max); } + /** + * The maximum value that can be represented as an unsigned long. + */ + public static final BigInteger UNSIGNED_LONG_MAX = BigInteger.ONE.shiftLeft(Long.SIZE).subtract(BigInteger.ONE); + + /** + * A unsigned long in a {@link BigInteger} between min (inclusive) and max (inclusive). 
+ */ + public static BigInteger randomUnsignedLongBetween(BigInteger min, BigInteger max) { + if (min.compareTo(BigInteger.ZERO) < 0) { + throw new IllegalArgumentException("Must be between [0] and [" + UNSIGNED_LONG_MAX + "]"); + } + if (0 < max.compareTo(UNSIGNED_LONG_MAX)) { + throw new IllegalArgumentException("Must be between [0] and [" + UNSIGNED_LONG_MAX + "]"); + } + // Shift the min and max down into the long range + long minShifted = min.add(BigInteger.valueOf(Long.MIN_VALUE)).longValueExact(); + long maxShifted = max.add(BigInteger.valueOf(Long.MIN_VALUE)).longValueExact(); + // Grab a random number in that range + long randomShifted = randomLongBetween(minShifted, maxShifted); + // Shift back up into long range + return BigInteger.valueOf(randomShifted).subtract(BigInteger.valueOf(Long.MIN_VALUE)); + } + /** * Returns a "scaled" number of iterations for loops which can have a variable * iteration count. This method is effectively diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java index b9059f751f626..125c0563577fc 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java @@ -21,6 +21,7 @@ import org.junit.AssumptionViolatedException; import java.io.IOException; +import java.math.BigInteger; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.SecureRandom; @@ -37,12 +38,14 @@ import javax.crypto.KeyGenerator; +import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static 
org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assume.assumeThat; @@ -358,4 +361,25 @@ public void testFipsKeyGenWithNonFipsSecureRandom() { assertThat(t.getClass().getCanonicalName(), is(equalTo("org.bouncycastle.crypto.fips.FipsUnapprovedOperationError"))); assertThat(t.getMessage(), is(equalTo("Attempt to create key with unapproved RNG: AES"))); } + + public void testRandomUnsignedLongBetween() { + assertThat( + randomUnsignedLongBetween(BigInteger.ZERO, UNSIGNED_LONG_MAX), + both(greaterThanOrEqualTo(BigInteger.ZERO)).and(lessThanOrEqualTo(UNSIGNED_LONG_MAX)) + ); + + BigInteger from = BigInteger.valueOf(randomLong()).subtract(BigInteger.valueOf(Long.MIN_VALUE)); + BigInteger to = BigInteger.valueOf(randomLong()).subtract(BigInteger.valueOf(Long.MIN_VALUE)); + if (from.compareTo(to) > 0) { + BigInteger s = from; + from = to; + to = s; + } + assertThat(randomUnsignedLongBetween(from, to), both(greaterThanOrEqualTo(from)).and(lessThanOrEqualTo(to))); + } + + public void testRandomUnsignedLongBetweenDegenerate() { + BigInteger target = BigInteger.valueOf(randomLong()).subtract(BigInteger.valueOf(Long.MIN_VALUE)); + assertThat(randomUnsignedLongBetween(target, target), equalTo(target)); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 8b113bb12d605..0af3d11c6065d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -43,8 +43,6 @@ public record TestCaseSupplier(String name, List types, Supplier supplier) implements Supplier { - - public static final BigInteger MAX_UNSIGNED_LONG = 
NumericUtils.UNSIGNED_LONG_MAX; /** * Build a test case without types. * @@ -510,7 +508,7 @@ private static List ulongCases(BigInteger min, BigInteger max cases.add( new TypedDataSupplier( "", - () -> BigInteger.valueOf(ESTestCase.randomLongBetween(lower1.longValue(), upper1.longValue())), + () -> ESTestCase.randomUnsignedLongBetween(lower1, upper1), DataTypes.UNSIGNED_LONG ) ); @@ -520,12 +518,12 @@ private static List ulongCases(BigInteger min, BigInteger max // Big values, greater than Long.MAX_VALUE BigInteger lower2 = min.max(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE)); - BigInteger upper2 = max.min(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.valueOf(Integer.MAX_VALUE))); + BigInteger upper2 = max.min(ESTestCase.UNSIGNED_LONG_MAX); if (lower2.compareTo(upper2) < 0) { cases.add( new TypedDataSupplier( "", - () -> BigInteger.valueOf(ESTestCase.randomLongBetween(lower2.longValue(), upper2.longValue())), + () -> ESTestCase.randomUnsignedLongBetween(lower2, upper2), DataTypes.UNSIGNED_LONG ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java index 1b611c78f8e8a..4712a1afa9399 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -51,15 +52,15 @@ public static Iterable parameters() { Long.MAX_VALUE, List.of() ); - // TestCaseSupplier.forUnaryUnsignedLong( - // suppliers, - // "ToStringFromUnsignedLongEvaluator[field=" + read + 
"]", - // DataTypes.KEYWORD, - // ul -> new BytesRef(ul.toString()), - // BigInteger.ZERO, - // MAX_UNSIGNED_LONG, - // List.of() - // ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "ToStringFromUnsignedLongEvaluator[field=" + read + "]", + DataTypes.KEYWORD, + ul -> new BytesRef(ul.toString()), + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); TestCaseSupplier.forUnaryDouble( suppliers, "ToStringFromDoubleEvaluator[field=" + read + "]", @@ -98,11 +99,7 @@ public static Iterable parameters() { v -> new BytesRef(v.toString()), List.of() ); - return parameterSuppliersFromTypedData( - // errorsForCasesWithoutExamples( - anyNullIsNull(true, suppliers) - // ) - ); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java index ee63353f2637d..f41b7c5de38ad 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.MAX_UNSIGNED_LONG; - public class FloorTests extends AbstractFunctionTestCase { public FloorTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -41,7 +39,7 @@ public static Iterable parameters() { DataTypes.UNSIGNED_LONG, ul -> NumericUtils.asLongUnsigned(ul), BigInteger.ZERO, - MAX_UNSIGNED_LONG, + UNSIGNED_LONG_MAX, List.of() ); TestCaseSupplier.forUnaryDouble( diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java index 267c780ab4619..c688a43e4ea6a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.MAX_UNSIGNED_LONG; - public class Log10Tests extends AbstractFunctionTestCase { public Log10Tests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -58,7 +56,7 @@ public static Iterable parameters() { DataTypes.DOUBLE, ul -> Math.log10(ul == null ? null : NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), BigInteger.ONE, - MAX_UNSIGNED_LONG, + UNSIGNED_LONG_MAX, List.of() ); TestCaseSupplier.forUnaryDouble( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index 4c13100b7c746..4e7b08dcf63be 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.MAX_UNSIGNED_LONG; - public class SqrtTests extends AbstractFunctionTestCase { public SqrtTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -57,7 +55,7 @@ public 
static Iterable parameters() { DataTypes.DOUBLE, ul -> Math.sqrt(ul == null ? null : NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), BigInteger.ZERO, - MAX_UNSIGNED_LONG, + UNSIGNED_LONG_MAX, List.of() ); TestCaseSupplier.forUnaryDouble( From b48640c8bf58e65fadc2e9fa6c9ee76c8ce4d503 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 12 Sep 2023 17:59:17 +0100 Subject: [PATCH 019/114] Fork computation in TransportGetShutdownStatusAction (#99490) This action does O(#shards) work so it must not happen on a transport worker. Also it can take minutes to complete in a huge cluster, so it really should react to cancellations properly. Closes #99487 --- docs/changelog/99490.yaml | 6 +++ .../master/TransportMasterNodeAction.java | 3 +- .../shutdown/GetShutdownStatusAction.java | 9 ++++ .../shutdown/RestGetShutdownStatusAction.java | 3 +- .../TransportGetShutdownStatusAction.java | 22 ++++++--- ...TransportGetShutdownStatusActionTests.java | 48 +++++++++++++++++++ 6 files changed, 82 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/99490.yaml diff --git a/docs/changelog/99490.yaml b/docs/changelog/99490.yaml new file mode 100644 index 0000000000000..07fd913f2c1c4 --- /dev/null +++ b/docs/changelog/99490.yaml @@ -0,0 +1,6 @@ +pr: 99490 +summary: Fork computation in `TransportGetShutdownStatusAction` +area: Infra/Node Lifecycle +type: bug +issues: + - 99487 diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index fd5f2ff785fa4..8f1bbffd09c0f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -118,10 +118,9 @@ protected abstract void masterOperation(Task task, Request request, ClusterState private void executeMasterOperation(Task task, 
Request request, ClusterState state, ActionListener listener) throws Exception { - if (task instanceof CancellableTask && ((CancellableTask) task).isCancelled()) { + if (task instanceof CancellableTask cancellableTask && cancellableTask.isCancelled()) { throw new TaskCancelledException("Task was cancelled"); } - masterOperation(task, request, state, listener); } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java index b087577b797af..ff71308cbb46a 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java @@ -14,12 +14,16 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; public class GetShutdownStatusAction extends ActionType { @@ -69,6 +73,11 @@ public boolean equals(Object o) { public int hashCode() { return Arrays.hashCode(nodeIds); } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); + } } public static class Response extends ActionResponse implements ToXContentObject { diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/RestGetShutdownStatusAction.java 
b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/RestGetShutdownStatusAction.java index 1586ef481fe99..9b8ed274d63a5 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/RestGetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/RestGetShutdownStatusAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; import java.util.List; @@ -36,7 +37,7 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { String[] nodeIds = Strings.commaDelimitedListToStringArray(request.param("nodeId")); - return channel -> client.execute( + return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).execute( GetShutdownStatusAction.INSTANCE, new GetShutdownStatusAction.Request(nodeIds), new RestToXContentListener<>(channel) diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java index 1efb35ed50b11..425db500070e5 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java @@ -36,9 +36,11 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.shutdown.PluginShutdownService; import org.elasticsearch.snapshots.SnapshotsInfoService; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import 
org.elasticsearch.transport.Transports; import org.elasticsearch.xpack.core.ilm.ErrorStep; import org.elasticsearch.xpack.core.ilm.OperationMode; import org.elasticsearch.xpack.core.ilm.ShrinkAction; @@ -89,7 +91,7 @@ public TransportGetShutdownStatusAction( GetShutdownStatusAction.Request::readFrom, indexNameExpressionResolver, GetShutdownStatusAction.Response::new, - ThreadPool.Names.SAME + ThreadPool.Names.MANAGEMENT ); this.allocationService = allocationService; this.allocationDeciders = allocationDeciders; @@ -104,7 +106,8 @@ protected void masterOperation( GetShutdownStatusAction.Request request, ClusterState state, ActionListener listener - ) throws Exception { + ) { + CancellableTask cancellableTask = (CancellableTask) task; NodesShutdownMetadata nodesShutdownMetadata = state.metadata().custom(NodesShutdownMetadata.TYPE); GetShutdownStatusAction.Response response; @@ -118,6 +121,7 @@ protected void masterOperation( ns -> new SingleNodeShutdownStatus( ns, shardMigrationStatus( + cancellableTask, state, ns.getNodeId(), ns.getType(), @@ -142,6 +146,7 @@ protected void masterOperation( ns -> new SingleNodeShutdownStatus( ns, shardMigrationStatus( + cancellableTask, state, ns.getNodeId(), ns.getType(), @@ -165,6 +170,7 @@ protected void masterOperation( // pkg-private for testing static ShutdownShardMigrationStatus shardMigrationStatus( + CancellableTask cancellableTask, ClusterState currentState, String nodeId, SingleNodeShutdownMetadata.Type shutdownType, @@ -174,6 +180,8 @@ static ShutdownShardMigrationStatus shardMigrationStatus( AllocationService allocationService, AllocationDeciders allocationDeciders ) { + assert Transports.assertNotTransportThread("doing O(#shards) work must be forked"); + // Only REMOVE-type shutdowns will try to move shards, so RESTART-type shutdowns should immediately complete if (SingleNodeShutdownMetadata.Type.RESTART.equals(shutdownType)) { return new ShutdownShardMigrationStatus( @@ -208,6 +216,7 @@ static 
ShutdownShardMigrationStatus shardMigrationStatus( var unassignedShards = currentState.getRoutingNodes() .unassigned() .stream() + .peek(s -> cancellableTask.ensureNotCancelled()) .filter(s -> Objects.equals(s.unassignedInfo().getLastAllocatedNodeId(), nodeId)) .filter(s -> s.primary() || hasShardCopyOnAnotherNode(currentState, s, shuttingDownNodes) == false) .toList(); @@ -264,6 +273,7 @@ static ShutdownShardMigrationStatus shardMigrationStatus( Optional> unmovableShard = currentState.getRoutingNodes() .node(nodeId) .shardsWithState(ShardRoutingState.STARTED) + .peek(s -> cancellableTask.ensureNotCancelled()) .map(shardRouting -> new Tuple<>(shardRouting, allocationService.explainShardAllocation(shardRouting, allocation))) // Given that we're checking the status of a node that's shutting down, no shards should be allowed to remain .filter(pair -> { @@ -285,8 +295,8 @@ static ShutdownShardMigrationStatus shardMigrationStatus( }) // If ILM is shrinking the index this shard is part of, it'll look like it's unmovable, but we can just wait for ILM to finish .filter(pair -> isIlmRestrictingShardMovement(currentState, pair.v1()) == false) - .peek(pair -> { - logger.debug( + .peek( + pair -> logger.debug( "node [{}] shutdown of type [{}] stalled: found shard [{}][{}] from index [{}] with negative decision: [{}]", nodeId, shutdownType, @@ -294,8 +304,8 @@ static ShutdownShardMigrationStatus shardMigrationStatus( pair.v1().primary() ? 
"primary" : "replica", pair.v1().shardId().getIndexName(), Strings.toString(pair.v2()) - ); - }) + ) + ) .findFirst(); if (totalRemainingShards == shardsToIgnoreForFinalStatus.get() && unmovableShard.isEmpty()) { diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java index 58a7aa4973784..16ee53d8cb7c8 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusActionTests.java @@ -43,6 +43,10 @@ import org.elasticsearch.node.Node; import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import org.elasticsearch.snapshots.SnapshotsInfoService; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskCancelHelper; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.gateway.TestGatewayAllocator; import org.elasticsearch.xpack.core.ilm.ErrorStep; @@ -152,6 +156,7 @@ public void testEmptyCluster() { ClusterState state = createTestClusterState(routingTable, List.of(), SingleNodeShutdownMetadata.Type.REMOVE); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.REMOVE, @@ -182,6 +187,7 @@ public void testRestartAlwaysComplete() { ClusterState state = createTestClusterState(routingTable.build(), List.of(imd), SingleNodeShutdownMetadata.Type.RESTART); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", 
GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.RESTART, @@ -218,6 +224,7 @@ public void testComplete() { ClusterState state = createTestClusterState(routingTable.build(), List.of(imd), SingleNodeShutdownMetadata.Type.REMOVE); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.REMOVE, @@ -231,6 +238,39 @@ public void testComplete() { assertShardMigration(status, SingleNodeShutdownMetadata.Status.COMPLETE, 0, nullValue()); } + /** + * Ensures we check whether the task is cancelled during the computation + */ + public void testCancelled() { + Index index = new Index(randomAlphaOfLength(5), randomAlphaOfLengthBetween(1, 20)); + IndexMetadata imd = generateIndexMetadata(index, 1, 0); + IndexRoutingTable indexRoutingTable = IndexRoutingTable.builder(index) + .addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), SHUTTING_DOWN_NODE_ID, true, ShardRoutingState.STARTED)) + .build(); + + RoutingTable.Builder routingTable = RoutingTable.builder(); + routingTable.add(indexRoutingTable); + ClusterState state = createTestClusterState(routingTable.build(), List.of(imd), SingleNodeShutdownMetadata.Type.REMOVE); + + final var task = new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()); + TaskCancelHelper.cancel(task, "test"); + + expectThrows( + TaskCancelledException.class, + () -> TransportGetShutdownStatusAction.shardMigrationStatus( + task, + state, + SHUTTING_DOWN_NODE_ID, + SingleNodeShutdownMetadata.Type.REMOVE, + true, + clusterInfoService, + snapshotsInfoService, + allocationService, + allocationDeciders + ) + ); + } + /** * Ensures that we properly detect "in progress" migrations while there are shards relocating off the node that's 
shutting down. */ @@ -263,6 +303,7 @@ public void testInProgressWithRelocatingShards() { ClusterState state = createTestClusterState(routingTable.build(), List.of(imd), SingleNodeShutdownMetadata.Type.REMOVE); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.REMOVE, @@ -315,6 +356,7 @@ public void testInProgressWithShardsMovingBetweenOtherNodes() { ClusterState state = createTestClusterState(routingTable.build(), List.of(imd), SingleNodeShutdownMetadata.Type.REMOVE); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.REMOVE, @@ -351,6 +393,7 @@ public void testStalled() { ClusterState state = createTestClusterState(routingTable.build(), List.of(imd), SingleNodeShutdownMetadata.Type.REMOVE); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.REMOVE, @@ -441,6 +484,7 @@ public void testNotStalledIfAllShardsHaveACopyOnAnotherNode() { ClusterState state = createTestClusterState(routingTable.build(), List.of(imd), SingleNodeShutdownMetadata.Type.REMOVE); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.REMOVE, @@ -473,6 +517,7 @@ public void testOnlyInitializingShardsRemaining() { ClusterState state = createTestClusterState(routingTable.build(), 
List.of(imd), SingleNodeShutdownMetadata.Type.REMOVE); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.REMOVE, @@ -548,6 +593,7 @@ public void testNodeNotInCluster() { .build(); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, bogusNodeId, SingleNodeShutdownMetadata.Type.REMOVE, @@ -642,6 +688,7 @@ private void checkStalledShardWithIlmState( state = setIlmOperationMode(state, operationMode); ShutdownShardMigrationStatus status = TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.REMOVE, @@ -810,6 +857,7 @@ private ShutdownShardMigrationStatus getUnassignedShutdownStatus(Index index, In ); return TransportGetShutdownStatusAction.shardMigrationStatus( + new CancellableTask(1, "direct", GetShutdownStatusAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), state, SHUTTING_DOWN_NODE_ID, SingleNodeShutdownMetadata.Type.REMOVE, From ba5b079fa657699dc8f7e8bcf8bc91fbab70f468 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 12 Sep 2023 10:06:31 -0700 Subject: [PATCH 020/114] Bump versions after 8.10.0 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 12 ++++++------ .buildkite/pipelines/periodic.yml | 12 ++++++------ .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 3 +-- server/src/main/java/org/elasticsearch/Version.java | 2 +- 6 files changed, 16 insertions(+), 17 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index bdff41788b563..92423539caf09 100644 --- 
a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -40,7 +40,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.14", "8.9.3", "8.10.0", "8.11.0"] + BWC_VERSION: ["7.17.14", "8.10.1", "8.11.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 3b70b82746ed8..6730a2d60e7e9 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -1584,8 +1584,8 @@ steps: env: BWC_VERSION: 8.9.2 - - label: "{{matrix.image}} / 8.9.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.3 + - label: "{{matrix.image}} / 8.10.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.0 timeout_in_minutes: 300 matrix: setup: @@ -1598,10 +1598,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.9.3 + BWC_VERSION: 8.10.0 - - label: "{{matrix.image}} / 8.10.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.0 + - label: "{{matrix.image}} / 8.10.1 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.1 timeout_in_minutes: 300 matrix: setup: @@ -1614,7 +1614,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.10.0 + BWC_VERSION: 
8.10.1 - label: "{{matrix.image}} / 8.11.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.11.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 29aec69bf3832..bc0dce06312a0 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -972,8 +972,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.9.2 - - label: 8.9.3 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.9.3#bwcTest + - label: 8.10.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.10.0#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -981,9 +981,9 @@ steps: machineType: custom-32-98304 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.9.3 - - label: 8.10.0 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.10.0#bwcTest + BWC_VERSION: 8.10.0 + - label: 8.10.1 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.10.1#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -991,7 +991,7 @@ steps: machineType: custom-32-98304 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.10.0 + BWC_VERSION: 8.10.1 - label: 8.11.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.11.0#bwcTest timeout_in_minutes: 300 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 42b124a8a0aca..902c5356f9bae 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -96,6 +96,6 @@ BWC_VERSION: - "8.9.0" - "8.9.1" - "8.9.2" - - "8.9.3" - "8.10.0" + - "8.10.1" - "8.11.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index da5aa1cdd64e1..bb57c7b896311 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,4 @@ BWC_VERSION: - "7.17.14" - - "8.9.3" - - "8.10.0" + - "8.10.1" - "8.11.0" diff --git 
a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 37b382db2f2ca..a1bb7df3fef0a 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -147,8 +147,8 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_9_0 = new Version(8_09_00_99); public static final Version V_8_9_1 = new Version(8_09_01_99); public static final Version V_8_9_2 = new Version(8_09_02_99); - public static final Version V_8_9_3 = new Version(8_09_03_99); public static final Version V_8_10_0 = new Version(8_10_00_99); + public static final Version V_8_10_1 = new Version(8_10_01_99); public static final Version V_8_11_0 = new Version(8_11_00_99); public static final Version CURRENT = V_8_11_0; From b2f4534851fc3661312207951f9092319e983fe6 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 12 Sep 2023 10:08:06 -0700 Subject: [PATCH 021/114] Prune changelogs after 8.10.0 release --- docs/changelog/87674.yaml | 10 ---------- docs/changelog/92574.yaml | 5 ----- docs/changelog/93545.yaml | 5 ----- docs/changelog/94132.yaml | 5 ----- docs/changelog/96068.yaml | 5 ----- docs/changelog/96069.yaml | 6 ------ docs/changelog/96082.yaml | 5 ----- docs/changelog/96515.yaml | 5 ----- docs/changelog/96577.yaml | 15 --------------- docs/changelog/96689.yaml | 5 ----- docs/changelog/96717.yaml | 5 ----- docs/changelog/96772.yaml | 5 ----- docs/changelog/96895.yaml | 5 ----- docs/changelog/96930.yaml | 6 ------ docs/changelog/96982.yaml | 6 ------ docs/changelog/96994.yaml | 5 ----- docs/changelog/97029.yaml | 5 ----- docs/changelog/97040.yaml | 5 ----- docs/changelog/97041.yaml | 5 ----- docs/changelog/97079.yaml | 5 ----- docs/changelog/97092.yaml | 6 ------ docs/changelog/97110.yaml | 6 ------ docs/changelog/97142.yaml | 5 ----- docs/changelog/97159.yaml | 5 ----- docs/changelog/97203.yaml | 5 ----- docs/changelog/97208.yaml | 5 ----- 
docs/changelog/97209.yaml | 5 ----- docs/changelog/97224.yaml | 5 ----- docs/changelog/97234.yaml | 5 ----- docs/changelog/97281.yaml | 5 ----- docs/changelog/97319.yaml | 32 -------------------------------- docs/changelog/97332.yaml | 6 ------ docs/changelog/97344.yaml | 5 ----- docs/changelog/97380.yaml | 5 ----- docs/changelog/97387.yaml | 5 ----- docs/changelog/97395.yaml | 5 ----- docs/changelog/97410.yaml | 5 ----- docs/changelog/97416.yaml | 5 ----- docs/changelog/97463.yaml | 5 ----- docs/changelog/97466.yaml | 5 ----- docs/changelog/97488.yaml | 6 ------ docs/changelog/97539.yaml | 5 ----- docs/changelog/97540.yaml | 6 ------ docs/changelog/97550.yaml | 5 ----- docs/changelog/97557.yaml | 6 ------ docs/changelog/97581.yaml | 5 ----- docs/changelog/97588.yaml | 5 ----- docs/changelog/97591.yaml | 5 ----- docs/changelog/97594.yaml | 5 ----- docs/changelog/97602.yaml | 5 ----- docs/changelog/97630.yaml | 5 ----- docs/changelog/97700.yaml | 6 ------ docs/changelog/97703.yaml | 6 ------ docs/changelog/97705.yaml | 7 ------- docs/changelog/97711.yaml | 5 ----- docs/changelog/97718.yaml | 6 ------ docs/changelog/97724.yaml | 5 ----- docs/changelog/97726.yaml | 5 ----- docs/changelog/97732.yaml | 5 ----- docs/changelog/97755.yaml | 5 ----- docs/changelog/97765.yaml | 5 ----- docs/changelog/97773.yaml | 5 ----- docs/changelog/97822.yaml | 5 ----- docs/changelog/97850.yaml | 5 ----- docs/changelog/97869.yaml | 5 ----- docs/changelog/97890.yaml | 5 ----- docs/changelog/97899.yaml | 5 ----- docs/changelog/97920.yaml | 6 ------ docs/changelog/97922.yaml | 5 ----- docs/changelog/97961.yaml | 5 ----- docs/changelog/97962.yaml | 6 ------ docs/changelog/97967.yaml | 5 ----- docs/changelog/97985.yaml | 5 ----- docs/changelog/97992.yaml | 6 ------ docs/changelog/98001.yaml | 5 ----- docs/changelog/98051.yaml | 5 ----- docs/changelog/98067.yaml | 6 ------ docs/changelog/98083.yaml | 5 ----- docs/changelog/98085.yaml | 5 ----- docs/changelog/98124.yaml | 6 ------ 
docs/changelog/98184.yaml | 5 ----- docs/changelog/98187.yaml | 5 ----- docs/changelog/98204.yaml | 13 ------------- docs/changelog/98206.yaml | 5 ----- docs/changelog/98209.yaml | 5 ----- docs/changelog/98246.yaml | 5 ----- docs/changelog/98259.yaml | 6 ------ docs/changelog/98265.yaml | 5 ----- docs/changelog/98295.yaml | 6 ------ docs/changelog/98307.yaml | 13 ------------- docs/changelog/98324.yaml | 5 ----- docs/changelog/98354.yaml | 6 ------ docs/changelog/98367.yaml | 6 ------ docs/changelog/98386.yaml | 5 ----- docs/changelog/98414.yaml | 5 ----- docs/changelog/98420.yaml | 5 ----- docs/changelog/98425.yaml | 5 ----- docs/changelog/98455.yaml | 5 ----- docs/changelog/98459.yaml | 5 ----- docs/changelog/98508.yaml | 5 ----- docs/changelog/98692.yaml | 5 ----- docs/changelog/98808.yaml | 6 ------ docs/changelog/98828.yaml | 5 ----- docs/changelog/98864.yaml | 5 ----- docs/changelog/98884.yaml | 5 ----- docs/changelog/98961.yaml | 5 ----- docs/changelog/98970.yaml | 5 ----- docs/changelog/98987.yaml | 6 ------ docs/changelog/99007.yaml | 5 ----- docs/changelog/99111.yaml | 5 ----- 110 files changed, 633 deletions(-) delete mode 100644 docs/changelog/87674.yaml delete mode 100644 docs/changelog/92574.yaml delete mode 100644 docs/changelog/93545.yaml delete mode 100644 docs/changelog/94132.yaml delete mode 100644 docs/changelog/96068.yaml delete mode 100644 docs/changelog/96069.yaml delete mode 100644 docs/changelog/96082.yaml delete mode 100644 docs/changelog/96515.yaml delete mode 100644 docs/changelog/96577.yaml delete mode 100644 docs/changelog/96689.yaml delete mode 100644 docs/changelog/96717.yaml delete mode 100644 docs/changelog/96772.yaml delete mode 100644 docs/changelog/96895.yaml delete mode 100644 docs/changelog/96930.yaml delete mode 100644 docs/changelog/96982.yaml delete mode 100644 docs/changelog/96994.yaml delete mode 100644 docs/changelog/97029.yaml delete mode 100644 docs/changelog/97040.yaml delete mode 100644 docs/changelog/97041.yaml 
delete mode 100644 docs/changelog/97079.yaml delete mode 100644 docs/changelog/97092.yaml delete mode 100644 docs/changelog/97110.yaml delete mode 100644 docs/changelog/97142.yaml delete mode 100644 docs/changelog/97159.yaml delete mode 100644 docs/changelog/97203.yaml delete mode 100644 docs/changelog/97208.yaml delete mode 100644 docs/changelog/97209.yaml delete mode 100644 docs/changelog/97224.yaml delete mode 100644 docs/changelog/97234.yaml delete mode 100644 docs/changelog/97281.yaml delete mode 100644 docs/changelog/97319.yaml delete mode 100644 docs/changelog/97332.yaml delete mode 100644 docs/changelog/97344.yaml delete mode 100644 docs/changelog/97380.yaml delete mode 100644 docs/changelog/97387.yaml delete mode 100644 docs/changelog/97395.yaml delete mode 100644 docs/changelog/97410.yaml delete mode 100644 docs/changelog/97416.yaml delete mode 100644 docs/changelog/97463.yaml delete mode 100644 docs/changelog/97466.yaml delete mode 100644 docs/changelog/97488.yaml delete mode 100644 docs/changelog/97539.yaml delete mode 100644 docs/changelog/97540.yaml delete mode 100644 docs/changelog/97550.yaml delete mode 100644 docs/changelog/97557.yaml delete mode 100644 docs/changelog/97581.yaml delete mode 100644 docs/changelog/97588.yaml delete mode 100644 docs/changelog/97591.yaml delete mode 100644 docs/changelog/97594.yaml delete mode 100644 docs/changelog/97602.yaml delete mode 100644 docs/changelog/97630.yaml delete mode 100644 docs/changelog/97700.yaml delete mode 100644 docs/changelog/97703.yaml delete mode 100644 docs/changelog/97705.yaml delete mode 100644 docs/changelog/97711.yaml delete mode 100644 docs/changelog/97718.yaml delete mode 100644 docs/changelog/97724.yaml delete mode 100644 docs/changelog/97726.yaml delete mode 100644 docs/changelog/97732.yaml delete mode 100644 docs/changelog/97755.yaml delete mode 100644 docs/changelog/97765.yaml delete mode 100644 docs/changelog/97773.yaml delete mode 100644 docs/changelog/97822.yaml delete mode 100644 
docs/changelog/97850.yaml delete mode 100644 docs/changelog/97869.yaml delete mode 100644 docs/changelog/97890.yaml delete mode 100644 docs/changelog/97899.yaml delete mode 100644 docs/changelog/97920.yaml delete mode 100644 docs/changelog/97922.yaml delete mode 100644 docs/changelog/97961.yaml delete mode 100644 docs/changelog/97962.yaml delete mode 100644 docs/changelog/97967.yaml delete mode 100644 docs/changelog/97985.yaml delete mode 100644 docs/changelog/97992.yaml delete mode 100644 docs/changelog/98001.yaml delete mode 100644 docs/changelog/98051.yaml delete mode 100644 docs/changelog/98067.yaml delete mode 100644 docs/changelog/98083.yaml delete mode 100644 docs/changelog/98085.yaml delete mode 100644 docs/changelog/98124.yaml delete mode 100644 docs/changelog/98184.yaml delete mode 100644 docs/changelog/98187.yaml delete mode 100644 docs/changelog/98204.yaml delete mode 100644 docs/changelog/98206.yaml delete mode 100644 docs/changelog/98209.yaml delete mode 100644 docs/changelog/98246.yaml delete mode 100644 docs/changelog/98259.yaml delete mode 100644 docs/changelog/98265.yaml delete mode 100644 docs/changelog/98295.yaml delete mode 100644 docs/changelog/98307.yaml delete mode 100644 docs/changelog/98324.yaml delete mode 100644 docs/changelog/98354.yaml delete mode 100644 docs/changelog/98367.yaml delete mode 100644 docs/changelog/98386.yaml delete mode 100644 docs/changelog/98414.yaml delete mode 100644 docs/changelog/98420.yaml delete mode 100644 docs/changelog/98425.yaml delete mode 100644 docs/changelog/98455.yaml delete mode 100644 docs/changelog/98459.yaml delete mode 100644 docs/changelog/98508.yaml delete mode 100644 docs/changelog/98692.yaml delete mode 100644 docs/changelog/98808.yaml delete mode 100644 docs/changelog/98828.yaml delete mode 100644 docs/changelog/98864.yaml delete mode 100644 docs/changelog/98884.yaml delete mode 100644 docs/changelog/98961.yaml delete mode 100644 docs/changelog/98970.yaml delete mode 100644 
docs/changelog/98987.yaml delete mode 100644 docs/changelog/99007.yaml delete mode 100644 docs/changelog/99111.yaml diff --git a/docs/changelog/87674.yaml b/docs/changelog/87674.yaml deleted file mode 100644 index ffc821def0375..0000000000000 --- a/docs/changelog/87674.yaml +++ /dev/null @@ -1,10 +0,0 @@ -pr: 87674 -summary: Mark `apm_user` for removal in a future major release -area: Authorization -type: deprecation -issues: [] -deprecation: - title: Mark `apm_user` for removal in a future major release - area: Authorization - details: The `apm_user` role has been deprecated and will be removed in a future major release. Users should migrate to `editor` and `viewer` roles - impact: Users will have to migrate to `editor` and `viewer` roles diff --git a/docs/changelog/92574.yaml b/docs/changelog/92574.yaml deleted file mode 100644 index 58ff04c61eb04..0000000000000 --- a/docs/changelog/92574.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 92574 -summary: cleanup some code NoriTokenizerFactory and KuromojiTokenizerFactory -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/93545.yaml b/docs/changelog/93545.yaml deleted file mode 100644 index 4367e44024e58..0000000000000 --- a/docs/changelog/93545.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 93545 -summary: Improve error message when aggregation doesn't support counter field -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/94132.yaml b/docs/changelog/94132.yaml deleted file mode 100644 index e53bc9bd5fabb..0000000000000 --- a/docs/changelog/94132.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 94132 -summary: HDFS plugin add replication_factor param -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/96068.yaml b/docs/changelog/96068.yaml deleted file mode 100644 index 9faf74414d36e..0000000000000 --- a/docs/changelog/96068.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 96068 -summary: Use the Weight#matches mode for highlighting by default -area: Search -type: 
enhancement -issues: [] \ No newline at end of file diff --git a/docs/changelog/96069.yaml b/docs/changelog/96069.yaml deleted file mode 100644 index e665f0384c730..0000000000000 --- a/docs/changelog/96069.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 96069 -summary: Improve MatchNoDocsQuery description -area: Search -type: enhancement -issues: - - 95741 diff --git a/docs/changelog/96082.yaml b/docs/changelog/96082.yaml deleted file mode 100644 index 225ac9ffa0b5d..0000000000000 --- a/docs/changelog/96082.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 96082 -summary: Support minimum_should_match field for terms_set query -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/96515.yaml b/docs/changelog/96515.yaml deleted file mode 100644 index bf6403f6990ce..0000000000000 --- a/docs/changelog/96515.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 96515 -summary: Support boxplot aggregation in transform -area: Transform -type: enhancement -issues: [] diff --git a/docs/changelog/96577.yaml b/docs/changelog/96577.yaml deleted file mode 100644 index c27ff562f5189..0000000000000 --- a/docs/changelog/96577.yaml +++ /dev/null @@ -1,15 +0,0 @@ -pr: 96577 -summary: Remove the unused executor builder for vector tile plugin -area: Geo -type: breaking -issues: [] -breaking: - title: Remove the unused executor builder for vector tile plugin - area: Cluster and node setting - details: "The threadpool called `vectortile` is a left over from the original development of the\ - \ vector tile search end point and it is used nowhere. It can still be a breaking change\ - \ if it is configured on the elasticsearch yml file, for example by changing the threadpool\ - \ size `thread_pool.vectortile.size=8`'" - impact: "In the case the threadpool appears on the yaml file, Elasticsearch will not start until those\ - \ lines are removed." 
- notable: false diff --git a/docs/changelog/96689.yaml b/docs/changelog/96689.yaml deleted file mode 100644 index 220624b8c1eca..0000000000000 --- a/docs/changelog/96689.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 96689 -summary: Use a collector manager in DfsPhase Knn Search -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/96717.yaml b/docs/changelog/96717.yaml deleted file mode 100644 index 8d151d16c2659..0000000000000 --- a/docs/changelog/96717.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 96717 -summary: Support type for simple query string -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/96772.yaml b/docs/changelog/96772.yaml deleted file mode 100644 index 6121a7e06e6f5..0000000000000 --- a/docs/changelog/96772.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 96772 -summary: Health API Periodic Logging -area: Health -type: enhancement -issues: [] diff --git a/docs/changelog/96895.yaml b/docs/changelog/96895.yaml deleted file mode 100644 index 8a68172e7c2d0..0000000000000 --- a/docs/changelog/96895.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 96895 -summary: Fix cluster bootstrap warning for single-node discovery -area: Cluster Coordination -type: bug -issues: [96874] diff --git a/docs/changelog/96930.yaml b/docs/changelog/96930.yaml deleted file mode 100644 index e4715091e9874..0000000000000 --- a/docs/changelog/96930.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 96930 -summary: Include more downsampling status statistics -area: TSDB -type: enhancement -issues: - - 96760 diff --git a/docs/changelog/96982.yaml b/docs/changelog/96982.yaml deleted file mode 100644 index 46b0745467c2e..0000000000000 --- a/docs/changelog/96982.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 96982 -summary: "Downsampling: copy the `_tier_preference` setting" -area: Downsampling -type: bug -issues: - - 96733 diff --git a/docs/changelog/96994.yaml b/docs/changelog/96994.yaml deleted file mode 100644 index 472ddd9ebbb20..0000000000000 --- a/docs/changelog/96994.yaml +++ 
/dev/null @@ -1,5 +0,0 @@ -pr: 96994 -summary: Add `node.roles` to cat allocation API -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/97029.yaml b/docs/changelog/97029.yaml deleted file mode 100644 index a4a920b486890..0000000000000 --- a/docs/changelog/97029.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97029 -summary: Improve test coverage for CCS search cancellation and fix response bugs -area: Search -type: bug -issues: [] diff --git a/docs/changelog/97040.yaml b/docs/changelog/97040.yaml deleted file mode 100644 index 983227f00a4c4..0000000000000 --- a/docs/changelog/97040.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97040 -summary: Upgrade Netty to 4.1.94.Final -area: Network -type: upgrade -issues: [] diff --git a/docs/changelog/97041.yaml b/docs/changelog/97041.yaml deleted file mode 100644 index 6bd6f642be26b..0000000000000 --- a/docs/changelog/97041.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97041 -summary: Introduce downsampling configuration for data stream lifecycle -area: Data streams -type: feature -issues: [] diff --git a/docs/changelog/97079.yaml b/docs/changelog/97079.yaml deleted file mode 100644 index f24096e771a58..0000000000000 --- a/docs/changelog/97079.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97079 -summary: Enable Serverless API protections dynamically -area: Infra/REST API -type: enhancement -issues: [] diff --git a/docs/changelog/97092.yaml b/docs/changelog/97092.yaml deleted file mode 100644 index d065a7291b5ac..0000000000000 --- a/docs/changelog/97092.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97092 -summary: "Fix `fields` API with `subobjects: false`" -area: Mapping -type: bug -issues: - - 96700 diff --git a/docs/changelog/97110.yaml b/docs/changelog/97110.yaml deleted file mode 100644 index a1e65266c865a..0000000000000 --- a/docs/changelog/97110.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97110 -summary: Copy "index.lifecycle.name" for ILM managed indices -area: Downsampling -type: bug -issues: - - 96732 diff --git 
a/docs/changelog/97142.yaml b/docs/changelog/97142.yaml deleted file mode 100644 index 30fcbb337ae0c..0000000000000 --- a/docs/changelog/97142.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97142 -summary: The model loading service should not notify listeners in a sync block -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/97159.yaml b/docs/changelog/97159.yaml deleted file mode 100644 index ddd7bb928d7b6..0000000000000 --- a/docs/changelog/97159.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97159 -summary: Improve exists query rewrite -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97203.yaml b/docs/changelog/97203.yaml deleted file mode 100644 index 56d9ddd446b7d..0000000000000 --- a/docs/changelog/97203.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97203 -summary: Fix possible NPE when transportversion is null in `MainResponse` -area: Infra/REST API -type: bug -issues: [] diff --git a/docs/changelog/97208.yaml b/docs/changelog/97208.yaml deleted file mode 100644 index 943df20468651..0000000000000 --- a/docs/changelog/97208.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97208 -summary: Improve match query rewrite -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97209.yaml b/docs/changelog/97209.yaml deleted file mode 100644 index ba6f917e618f7..0000000000000 --- a/docs/changelog/97209.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97209 -summary: Improve prefix query rewrite -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97224.yaml b/docs/changelog/97224.yaml deleted file mode 100644 index 50605bd6ad67f..0000000000000 --- a/docs/changelog/97224.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97224 -summary: Remove exception wrapping in `BatchedRerouteService` -area: Allocation -type: bug -issues: [] diff --git a/docs/changelog/97234.yaml b/docs/changelog/97234.yaml deleted file mode 100644 index c4326fcfcc1ca..0000000000000 --- a/docs/changelog/97234.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97234 
-summary: Add "operator" field to authenticate response -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/97281.yaml b/docs/changelog/97281.yaml deleted file mode 100644 index 5880ffaa8e93b..0000000000000 --- a/docs/changelog/97281.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97281 -summary: Improve iterating over many field producers during downsample operation -area: Downsampling -type: enhancement -issues: [] diff --git a/docs/changelog/97319.yaml b/docs/changelog/97319.yaml deleted file mode 100644 index 16df7bbd34b22..0000000000000 --- a/docs/changelog/97319.yaml +++ /dev/null @@ -1,32 +0,0 @@ -pr: 97319 -summary: Change pre-configured and cached analyzer components to use IndexVersion instead of Version -area: "Analysis" -type: breaking-java -breaking: - title: Change pre-configured and cached analyzer components to use IndexVersion instead of Version - area: Java API - details: |- - This PR changes the types used to obtain pre-configured components from Version to IndexVersion, - with corresponding changes to method names. - - Prior to 8.10, there is a one-to-one mapping between node version and index version, with corresponding constants - in the IndexVersion class. - Starting in 8.10, IndexVersion is versioned independently of node version, and will be a simple incrementing number. - For more information on how to use IndexVersion and other version types, please see the contributing guide. - impact: Analysis components now take IndexVersion instead of Version - notable: false -issues: [] -highlight: - title: Change pre-configured and cached analyzer components to use IndexVersion instead of Version - body: |- - As part of ongoing refactoring work, we are separating out various component versions into their own types. - For this, we have introduced a new `IndexVersion` type to represent the version of index data and metadata. 
- - This PR changes the types used to obtain pre-configured components from Version to IndexVersion, - with corresponding changes to method names. - - Prior to 8.10, there is a one-to-one mapping between node version and index version, with corresponding constants - in the IndexVersion class. - Starting in 8.10, IndexVersion is versioned independently of node version, and will be a simple incrementing number. - For more information on how to use IndexVersion and other version types, please see the contributing guide. - notable: false diff --git a/docs/changelog/97332.yaml b/docs/changelog/97332.yaml deleted file mode 100644 index 2dfef559493e2..0000000000000 --- a/docs/changelog/97332.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97332 -summary: Fix edge case for active flag for flush on idle -area: Engine -type: enhancement -issues: - - 97154 diff --git a/docs/changelog/97344.yaml b/docs/changelog/97344.yaml deleted file mode 100644 index 6ebcb34c11588..0000000000000 --- a/docs/changelog/97344.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97344 -summary: Change `MetricFieldProducer#metrics` field type from list to array -area: Downsampling -type: enhancement -issues: [] diff --git a/docs/changelog/97380.yaml b/docs/changelog/97380.yaml deleted file mode 100644 index aab69092a4b88..0000000000000 --- a/docs/changelog/97380.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97380 -summary: "[Profiling] Add initial support for upgrades" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/97387.yaml b/docs/changelog/97387.yaml deleted file mode 100644 index f7eccdbe767d7..0000000000000 --- a/docs/changelog/97387.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97387 -summary: '`ProfileCollectorManager` to support child profile collectors' -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97395.yaml b/docs/changelog/97395.yaml deleted file mode 100644 index 3e32858aa340d..0000000000000 --- a/docs/changelog/97395.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 
97395 -summary: Refresh tokens without search -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/97410.yaml b/docs/changelog/97410.yaml deleted file mode 100644 index cf2536df59e7e..0000000000000 --- a/docs/changelog/97410.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97410 -summary: Introduce a collector manager for `QueryPhaseCollector` -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97416.yaml b/docs/changelog/97416.yaml deleted file mode 100644 index 079e93ee99b5f..0000000000000 --- a/docs/changelog/97416.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97416 -summary: Enable parallel collection in Dfs phase -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97463.yaml b/docs/changelog/97463.yaml deleted file mode 100644 index df2ce0e474011..0000000000000 --- a/docs/changelog/97463.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97463 -summary: "[Enterprise Search] Add connectors indices and ent-search pipeline" -area: Application -type: feature -issues: [] diff --git a/docs/changelog/97466.yaml b/docs/changelog/97466.yaml deleted file mode 100644 index 5f9e72430bd6d..0000000000000 --- a/docs/changelog/97466.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97466 -summary: Enable Query Rules as technical preview -area: Application -type: feature -issues: [] diff --git a/docs/changelog/97488.yaml b/docs/changelog/97488.yaml deleted file mode 100644 index 2094338d63896..0000000000000 --- a/docs/changelog/97488.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97488 -summary: Limit `_terms_enum` prefix size -area: Search -type: enhancement -issues: - - 96572 diff --git a/docs/changelog/97539.yaml b/docs/changelog/97539.yaml deleted file mode 100644 index b7e3ffd4eeb9e..0000000000000 --- a/docs/changelog/97539.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97539 -summary: "Give all acces to .slo-observability.* indice to kibana user" -area: Authentication -type: enhancement -issues: [] \ No newline at end of file diff --git 
a/docs/changelog/97540.yaml b/docs/changelog/97540.yaml deleted file mode 100644 index 471400689a1ec..0000000000000 --- a/docs/changelog/97540.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97540 -summary: Make `terminate_after` early termination friendly -area: Search -type: bug -issues: - - 97269 diff --git a/docs/changelog/97550.yaml b/docs/changelog/97550.yaml deleted file mode 100644 index b7e6097f70264..0000000000000 --- a/docs/changelog/97550.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97550 -summary: Introduce a collector manager for `PartialHitCountCollector` -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97557.yaml b/docs/changelog/97557.yaml deleted file mode 100644 index 28e0b9e413964..0000000000000 --- a/docs/changelog/97557.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97557 -summary: Run downsampling using persistent tasks -area: Downsampling -type: enhancement -issues: - - 93582 diff --git a/docs/changelog/97581.yaml b/docs/changelog/97581.yaml deleted file mode 100644 index ada73b7962631..0000000000000 --- a/docs/changelog/97581.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97581 -summary: Add Setting to optionally use mmap for shared cache IO -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/97588.yaml b/docs/changelog/97588.yaml deleted file mode 100644 index e714cb14f77c5..0000000000000 --- a/docs/changelog/97588.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97588 -summary: Fix weird `change_point` bug where all data values are equivalent -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/97591.yaml b/docs/changelog/97591.yaml deleted file mode 100644 index bd188761c6202..0000000000000 --- a/docs/changelog/97591.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97591 -summary: Changing watcher to disable cookies in shared http client -area: Watcher -type: bug -issues: [] diff --git a/docs/changelog/97594.yaml b/docs/changelog/97594.yaml deleted file mode 100644 index 61641fcaf86f2..0000000000000 --- 
a/docs/changelog/97594.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97594 -summary: Improve wildcard query and terms query rewrite -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97602.yaml b/docs/changelog/97602.yaml deleted file mode 100644 index 0af420167e5ed..0000000000000 --- a/docs/changelog/97602.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97602 -summary: Add tsdb metrics builtin component template -area: "TSDB" -type: enhancement -issues: [] diff --git a/docs/changelog/97630.yaml b/docs/changelog/97630.yaml deleted file mode 100644 index edceb0c51c6b1..0000000000000 --- a/docs/changelog/97630.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97630 -summary: Add an API for managing the settings of Security system indices -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/97700.yaml b/docs/changelog/97700.yaml deleted file mode 100644 index 0e144a22ab39b..0000000000000 --- a/docs/changelog/97700.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97700 -summary: Add `completion_time` time field to `async_search` get and status response -area: Search -type: enhancement -issues: - - 88640 diff --git a/docs/changelog/97703.yaml b/docs/changelog/97703.yaml deleted file mode 100644 index cb9efb0e87c94..0000000000000 --- a/docs/changelog/97703.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97703 -summary: Track `max_score` in collapse when requested -area: Search -type: bug -issues: - - 97653 diff --git a/docs/changelog/97705.yaml b/docs/changelog/97705.yaml deleted file mode 100644 index a9b6c3f79cffb..0000000000000 --- a/docs/changelog/97705.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 97705 -summary: "Fix to stop aggregatable subobjects from being considered multi-fields, to support \ - `\"subobjects\": false` in data frame analytics" -area: Machine Learning -type: bug -issues: - - 88605 diff --git a/docs/changelog/97711.yaml b/docs/changelog/97711.yaml deleted file mode 100644 index e1dc8612091f3..0000000000000 --- a/docs/changelog/97711.yaml +++ 
/dev/null @@ -1,5 +0,0 @@ -pr: 97711 -summary: Release master service task on timeout -area: Cluster Coordination -type: bug -issues: [] diff --git a/docs/changelog/97718.yaml b/docs/changelog/97718.yaml deleted file mode 100644 index f864aa70ae384..0000000000000 --- a/docs/changelog/97718.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97718 -summary: Fix async missing events -area: EQL -type: bug -issues: - - 97644 diff --git a/docs/changelog/97724.yaml b/docs/changelog/97724.yaml deleted file mode 100644 index 78f3812e36cd7..0000000000000 --- a/docs/changelog/97724.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97724 -summary: Mapped field types searchable with doc values -area: TSDB -type: bug -issues: [] diff --git a/docs/changelog/97726.yaml b/docs/changelog/97726.yaml deleted file mode 100644 index 37f8e966829cd..0000000000000 --- a/docs/changelog/97726.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97726 -summary: Wire `QueryPhaseCollectorManager` into the query phase -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97732.yaml b/docs/changelog/97732.yaml deleted file mode 100644 index d2b9d7cb67fb9..0000000000000 --- a/docs/changelog/97732.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97732 -summary: "[Fleet] Allow `kibana_system` to put datastream lifecycle" -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/97755.yaml b/docs/changelog/97755.yaml deleted file mode 100644 index a2b50ba2f0fc8..0000000000000 --- a/docs/changelog/97755.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97755 -summary: Wire concurrent top docs collector managers when size is 0 -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/97765.yaml b/docs/changelog/97765.yaml deleted file mode 100644 index 67a5c8f72cc70..0000000000000 --- a/docs/changelog/97765.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97765 -summary: Install data stream template for Kibana reporting -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/97773.yaml 
b/docs/changelog/97773.yaml deleted file mode 100644 index 3106dc2621742..0000000000000 --- a/docs/changelog/97773.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97773 -summary: "[Profiling] Support index migrations" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/97822.yaml b/docs/changelog/97822.yaml deleted file mode 100644 index 1457fb4fad12f..0000000000000 --- a/docs/changelog/97822.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97822 -summary: Allow Lucene directory implementations to estimate their size -area: Store -type: enhancement -issues: [] diff --git a/docs/changelog/97850.yaml b/docs/changelog/97850.yaml deleted file mode 100644 index f798c3f5cd404..0000000000000 --- a/docs/changelog/97850.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97850 -summary: Allow custom geo ip database files to be downloaded -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/97869.yaml b/docs/changelog/97869.yaml deleted file mode 100644 index 067ee5c93ba3e..0000000000000 --- a/docs/changelog/97869.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97869 -summary: Add missing sync on `indicesThatCannotBeCreated` -area: CRUD -type: bug -issues: [] diff --git a/docs/changelog/97890.yaml b/docs/changelog/97890.yaml deleted file mode 100644 index 9d2339d04194f..0000000000000 --- a/docs/changelog/97890.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97890 -summary: "[Profiling] Consider static settings in status" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/97899.yaml b/docs/changelog/97899.yaml deleted file mode 100644 index c6ea51c11461a..0000000000000 --- a/docs/changelog/97899.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97899 -summary: Fork response reading in `TransportNodesAction` -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/97920.yaml b/docs/changelog/97920.yaml deleted file mode 100644 index 34cfa8c445424..0000000000000 --- a/docs/changelog/97920.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97920 -summary: Avoid 
`transport_worker` thread in `TransportBroadcastByNodeAction` -area: Distributed -type: bug -issues: - - 97914 diff --git a/docs/changelog/97922.yaml b/docs/changelog/97922.yaml deleted file mode 100644 index e8fb279f8d291..0000000000000 --- a/docs/changelog/97922.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97922 -summary: Fork remote-cluster response handling -area: Network -type: bug -issues: [] diff --git a/docs/changelog/97961.yaml b/docs/changelog/97961.yaml deleted file mode 100644 index 94bfb3f9bc4d0..0000000000000 --- a/docs/changelog/97961.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97961 -summary: Infrastructure to report upon document parsing -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/97962.yaml b/docs/changelog/97962.yaml deleted file mode 100644 index d4987245bf352..0000000000000 --- a/docs/changelog/97962.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97962 -summary: Introduce Synonyms Management API used for synonym and synonym_graph filters -area: Search -type: enhancement -issues: - - 38523 diff --git a/docs/changelog/97967.yaml b/docs/changelog/97967.yaml deleted file mode 100644 index d14fbf788864e..0000000000000 --- a/docs/changelog/97967.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97967 -summary: Ensure frozen indices have correct tier preference -area: "Indices APIs" -type: bug -issues: [] diff --git a/docs/changelog/97985.yaml b/docs/changelog/97985.yaml deleted file mode 100644 index 6389f6cfc110f..0000000000000 --- a/docs/changelog/97985.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97985 -summary: Adding `ApiFilteringActionFilter` -area: Infra/Plugins -type: enhancement -issues: [] diff --git a/docs/changelog/97992.yaml b/docs/changelog/97992.yaml deleted file mode 100644 index 6f5746c04b852..0000000000000 --- a/docs/changelog/97992.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 97401 -summary: Fix transform incorrectly calculating date bucket on updating old data -area: Transform -type: bug -issues: - - 97101 diff --git 
a/docs/changelog/98001.yaml b/docs/changelog/98001.yaml deleted file mode 100644 index 2a5a1c13d9741..0000000000000 --- a/docs/changelog/98001.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98001 -summary: Avoid `transport_worker` thread in `TransportBroadcastAction` -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/98051.yaml b/docs/changelog/98051.yaml deleted file mode 100644 index 3c6d6fa974efd..0000000000000 --- a/docs/changelog/98051.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98051 -summary: Mark customer settings for serverless -area: Infra/Settings -type: enhancement -issues: [] diff --git a/docs/changelog/98067.yaml b/docs/changelog/98067.yaml deleted file mode 100644 index 18227a151cf51..0000000000000 --- a/docs/changelog/98067.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 98067 -summary: Avoid double get -area: Authentication -type: enhancement -issues: - - 97928 diff --git a/docs/changelog/98083.yaml b/docs/changelog/98083.yaml deleted file mode 100644 index 28f8e0698ff92..0000000000000 --- a/docs/changelog/98083.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98083 -summary: Collect additional object store stats for S3 -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/98085.yaml b/docs/changelog/98085.yaml deleted file mode 100644 index b079ea4b10c07..0000000000000 --- a/docs/changelog/98085.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98085 -summary: Allow `ByteSizeDirectory` to expose their data set sizes -area: Store -type: enhancement -issues: [] diff --git a/docs/changelog/98124.yaml b/docs/changelog/98124.yaml deleted file mode 100644 index 01d7e313fff90..0000000000000 --- a/docs/changelog/98124.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 98124 -summary: Fork CCS remote-cluster responses -area: Search -type: bug -issues: - - 97997 diff --git a/docs/changelog/98184.yaml b/docs/changelog/98184.yaml deleted file mode 100644 index 6191d92cb5153..0000000000000 --- a/docs/changelog/98184.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98184 
-summary: Separating SLM from ILM -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/98187.yaml b/docs/changelog/98187.yaml deleted file mode 100644 index 8163d3a215ad4..0000000000000 --- a/docs/changelog/98187.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98187 -summary: Make `RestController` pluggable -area: Infra/REST API -type: enhancement -issues: [] diff --git a/docs/changelog/98204.yaml b/docs/changelog/98204.yaml deleted file mode 100644 index f7a80e3936d22..0000000000000 --- a/docs/changelog/98204.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 98204 -summary: Introduce executor for concurrent search -area: Search -type: feature -issues: [] -highlight: - title: Enable parallel knn search across segments - body: |- - Elasticsearch has until now performed search sequentially across the - segments within each shard. This change makes knn queries faster on shards - that are made of more than one segment, by rewriting and collecting each - segment in parallel. - notable: true diff --git a/docs/changelog/98206.yaml b/docs/changelog/98206.yaml deleted file mode 100644 index 452f645987e58..0000000000000 --- a/docs/changelog/98206.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98206 -summary: Detect infinite loop in the WordPiece tokenizer -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/98209.yaml b/docs/changelog/98209.yaml deleted file mode 100644 index 7939137429d14..0000000000000 --- a/docs/changelog/98209.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98209 -summary: Fork CCS search-shards handling -area: Search -type: bug -issues: [] diff --git a/docs/changelog/98246.yaml b/docs/changelog/98246.yaml deleted file mode 100644 index 595e97838cbc7..0000000000000 --- a/docs/changelog/98246.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98246 -summary: Read operator privs enabled from Env settings -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/98259.yaml b/docs/changelog/98259.yaml deleted file mode 100644 index 
359ec0c6c390c..0000000000000 --- a/docs/changelog/98259.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 98259 -summary: Support getting active-only API keys via Get API keys API -area: Security -type: enhancement -issues: - - 97995 diff --git a/docs/changelog/98265.yaml b/docs/changelog/98265.yaml deleted file mode 100644 index bc0f978ed813d..0000000000000 --- a/docs/changelog/98265.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98265 -summary: Do not assign ignored shards -area: Allocation -type: bug -issues: [] diff --git a/docs/changelog/98295.yaml b/docs/changelog/98295.yaml deleted file mode 100644 index 43682555bf3f0..0000000000000 --- a/docs/changelog/98295.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 98295 -summary: Skip segment for `MatchNoDocsQuery` filters -area: Aggregations -type: bug -issues: - - 94637 diff --git a/docs/changelog/98307.yaml b/docs/changelog/98307.yaml deleted file mode 100644 index f04520c3ec024..0000000000000 --- a/docs/changelog/98307.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 98307 -summary: Beta release for API key based cross-cluster access -area: Security -type: feature -issues: [] -highlight: - title: API key based cross-cluster search and replication (Beta) - body: |- - This <> uses a cross-cluster API key to authenticate - and authorize cross-cluster operations to a remote cluster. It completely separates - network security controls between the local and remote cluster as well as offers - administrators of both the local and the remote cluster fine-grained access controls. 
- notable: false diff --git a/docs/changelog/98324.yaml b/docs/changelog/98324.yaml deleted file mode 100644 index c91dc57bbb9fa..0000000000000 --- a/docs/changelog/98324.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98324 -summary: Avoid risk of OOM in datafeeds when memory is constrained -area: Machine Learning -type: bug -issues: [89769] diff --git a/docs/changelog/98354.yaml b/docs/changelog/98354.yaml deleted file mode 100644 index f97e3f02b8541..0000000000000 --- a/docs/changelog/98354.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 98354 -summary: Improve reliability of elections with message delays -area: Cluster Coordination -type: bug -issues: - - 97909 diff --git a/docs/changelog/98367.yaml b/docs/changelog/98367.yaml deleted file mode 100644 index 2e8240b80745f..0000000000000 --- a/docs/changelog/98367.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 98367 -summary: Check ILM status before reporting node migration STALLED -area: Infra/Node Lifecycle -type: enhancement -issues: - - 89486 diff --git a/docs/changelog/98386.yaml b/docs/changelog/98386.yaml deleted file mode 100644 index 14ead4ba721dd..0000000000000 --- a/docs/changelog/98386.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98386 -summary: Make `TransportAddVotingConfigExclusionsAction` retryable -area: Cluster Coordination -type: bug -issues: [] diff --git a/docs/changelog/98414.yaml b/docs/changelog/98414.yaml deleted file mode 100644 index 0a245893676e0..0000000000000 --- a/docs/changelog/98414.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98414 -summary: '`TimeSeriesIndexSearcher` to offload to the provided executor' -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/98420.yaml b/docs/changelog/98420.yaml deleted file mode 100644 index 7f62e500a6fed..0000000000000 --- a/docs/changelog/98420.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98420 -summary: Fix election scheduling after discovery outage -area: Cluster Coordination -type: bug -issues: [] diff --git a/docs/changelog/98425.yaml 
b/docs/changelog/98425.yaml deleted file mode 100644 index bdd34a3ec8de6..0000000000000 --- a/docs/changelog/98425.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98425 -summary: Add support for concurrent collection when size is greater than zero -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/98455.yaml b/docs/changelog/98455.yaml deleted file mode 100644 index 542706173b378..0000000000000 --- a/docs/changelog/98455.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98455 -summary: Add setting for search parallelism -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/98459.yaml b/docs/changelog/98459.yaml deleted file mode 100644 index 2bd3969324604..0000000000000 --- a/docs/changelog/98459.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98459 -summary: Unwrap IOException in `ContextIndexSearcher` concurrent code-path -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/98508.yaml b/docs/changelog/98508.yaml deleted file mode 100644 index 60290bca6dbe9..0000000000000 --- a/docs/changelog/98508.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98508 -summary: Adding special logic to the disk health check for search-only nodes -area: Health -type: enhancement -issues: [] diff --git a/docs/changelog/98692.yaml b/docs/changelog/98692.yaml deleted file mode 100644 index a7b0d04be97f9..0000000000000 --- a/docs/changelog/98692.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98692 -summary: Add request header size limit for RCS transport connections -area: Network -type: enhancement -issues: [] diff --git a/docs/changelog/98808.yaml b/docs/changelog/98808.yaml deleted file mode 100644 index 100b0ec39cbb4..0000000000000 --- a/docs/changelog/98808.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 98808 -summary: Set default index mode for `TimeSeries` to `null` -area: Aggregations -type: enhancement -issues: - - 97429 diff --git a/docs/changelog/98828.yaml b/docs/changelog/98828.yaml deleted file mode 100644 index 76c4be1b615bd..0000000000000 --- 
a/docs/changelog/98828.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98828 -summary: Fix how Maps#flatten handle map values inside a list -area: Geo -type: bug -issues: [] diff --git a/docs/changelog/98864.yaml b/docs/changelog/98864.yaml deleted file mode 100644 index 52f5b1b0ad70a..0000000000000 --- a/docs/changelog/98864.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98864 -summary: "[Profiling] Abort index creation on outdated index" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/98884.yaml b/docs/changelog/98884.yaml deleted file mode 100644 index f1e56845e3557..0000000000000 --- a/docs/changelog/98884.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98884 -summary: "[Profiling] Mark executables without a name" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/98961.yaml b/docs/changelog/98961.yaml deleted file mode 100644 index cf282b4ad7562..0000000000000 --- a/docs/changelog/98961.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98961 -summary: Fix NPE when `GetUser` with profile uid before profile index exists -area: Security -type: bug -issues: [] diff --git a/docs/changelog/98970.yaml b/docs/changelog/98970.yaml deleted file mode 100644 index 2dd5185ef218d..0000000000000 --- a/docs/changelog/98970.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 98970 -summary: Fix `BlobCacheBufferedIndexInput` large read after clone -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/98987.yaml b/docs/changelog/98987.yaml deleted file mode 100644 index b601b934f6133..0000000000000 --- a/docs/changelog/98987.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 98987 -summary: EQL and ESQL to use only the necessary fields in the internal `field_caps` - calls -area: EQL -type: enhancement -issues: [] diff --git a/docs/changelog/99007.yaml b/docs/changelog/99007.yaml deleted file mode 100644 index cbcd3438a7c80..0000000000000 --- a/docs/changelog/99007.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 99007 -summary: Cardinality nested in time series doc values bug 
-area: "Aggregations" -type: bug -issues: [] diff --git a/docs/changelog/99111.yaml b/docs/changelog/99111.yaml deleted file mode 100644 index 2c59c977fd096..0000000000000 --- a/docs/changelog/99111.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 99111 -summary: '`_health_report` slm indicator should use the policy id (not the name)' -area: Health -type: bug -issues: [] From 8685c8afe3a90496271ff55bf1425e38a9d88e4b Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Tue, 12 Sep 2023 13:38:14 -0500 Subject: [PATCH 022/114] Updating data streams module to use new rest test framework (#99391) --- modules/data-streams/build.gradle | 21 ++++--------------- .../DataStreamsClientYamlTestSuiteIT.java | 18 ++++++++++++++++ 2 files changed, 22 insertions(+), 17 deletions(-) diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index c8ba34e8f751d..8acdb0f156af1 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -2,9 +2,9 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.test-with-dependencies' apply plugin: 'elasticsearch.internal-cluster-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-java-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { description 'Elasticsearch Expanded Pack Plugin - Data Streams' @@ -22,27 +22,14 @@ dependencies { testImplementation project(path: ':test:test-clusters') } -testClusters.configureEach { - module ':modules:reindex' - testDistribution = 'DEFAULT' - // disable ILM history, since it disturbs tests using _all - setting 'indices.lifecycle.history_index_enabled', 'false' - setting 'xpack.security.enabled', 'true' - keystore 'bootstrap.password', 'x-pack-test-password' - user username: "x_pack_rest_user", password: "x-pack-test-password" 
+tasks.named('yamlRestTest') { + usesDefaultDistribution() } tasks.named('javaRestTest') { usesDefaultDistribution() } -testClusters.matching { it.name == "javaRestTest" }.configureEach { - testDistribution = 'DEFAULT' - setting 'xpack.security.enabled', 'false' - // disable ILM history, since it disturbs tests using _all - setting 'indices.lifecycle.history_index_enabled', 'false' -} - if (BuildParams.inFipsJvm){ // These fail in CI but only when run as part of checkPart2 and not individually. // Tracked in : diff --git a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java index c5029637e6d6d..4a7fa6109f924 100644 --- a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java +++ b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java @@ -12,8 +12,11 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class DataStreamsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -33,4 +36,19 @@ protected Settings restClientSettings() { return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", BASIC_AUTH_VALUE).build(); } + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .module("reindex") + .setting("indices.lifecycle.history_index_enabled", "false") + 
.setting("xpack.security.enabled", "true") + .keystore("bootstrap.password", "x-pack-test-password") + .user("x_pack_rest_user", "x-pack-test-password") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + } From 255c9a7f951d4eb53f957b0e285ab78408237622 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 12 Sep 2023 14:53:41 -0400 Subject: [PATCH 023/114] [DOCS] Move x-pack docs to `docs/reference` dir (#99209) **Problem:** For historical reasons, source files for the Elasticsearch Guide's security, watcher, and Logstash API docs are housed in the `x-pack/docs` directory. This can confuse new contributors who expect Elasticsearch Guide docs to be located in `docs/reference`. **Solution:** - Move the security, watcher, and Logstash API doc source files to the `docs/reference` directory - Update doc snippet tests to use security Rel: https://github.com/elastic/platform-docs-team/issues/208 --- docs/build.gradle | 252 +++++++++++++++++- docs/reference/ccr/getting-started.asciidoc | 2 +- .../set-up-a-data-stream.asciidoc | 2 +- docs/reference/docs/reindex.asciidoc | 14 +- docs/reference/index.asciidoc | 4 +- .../anomaly-detection/apis/put-job.asciidoc | 2 +- .../apis/put-dfanalytics.asciidoc | 4 +- .../cluster/remote-clusters-api-key.asciidoc | 2 +- .../cluster/remote-clusters-cert.asciidoc | 2 +- docs/reference/rest-api/defs.asciidoc | 2 +- docs/reference/rest-api/index.asciidoc | 6 +- docs/reference/rest-api/info.asciidoc | 2 +- .../logstash/delete-pipeline.asciidoc | 0 .../rest-api/logstash/get-pipeline.asciidoc | 0 .../rest-api/logstash/index.asciidoc | 0 .../rest-api/logstash/put-pipeline.asciidoc | 0 .../reference}/rest-api/security.asciidoc | 0 .../security/activate-user-profile.asciidoc | 0 .../rest-api/security/authenticate.asciidoc | 0 .../security/bulk-update-api-keys.asciidoc | 0 .../security/change-password.asciidoc | 0 .../security/clear-api-key-cache.asciidoc | 0 
.../rest-api/security/clear-cache.asciidoc | 0 .../security/clear-privileges-cache.asciidoc | 0 .../security/clear-roles-cache.asciidoc | 0 .../clear-service-token-caches.asciidoc | 0 .../security/create-api-keys.asciidoc | 0 .../create-cross-cluster-api-key.asciidoc | 0 .../security/create-role-mappings.asciidoc | 0 .../rest-api/security/create-roles.asciidoc | 0 .../security/create-service-token.asciidoc | 0 .../rest-api/security/create-users.asciidoc | 0 .../delegate-pki-authentication.asciidoc | 0 .../security/delete-app-privileges.asciidoc | 0 .../security/delete-role-mappings.asciidoc | 0 .../rest-api/security/delete-roles.asciidoc | 0 .../security/delete-service-token.asciidoc | 0 .../rest-api/security/delete-users.asciidoc | 0 .../security/disable-user-profile.asciidoc | 0 .../rest-api/security/disable-users.asciidoc | 0 .../security/enable-user-profile.asciidoc | 0 .../rest-api/security/enable-users.asciidoc | 0 .../rest-api/security/enroll-kibana.asciidoc | 0 .../rest-api/security/enroll-node.asciidoc | 0 .../rest-api/security/get-api-keys.asciidoc | 0 .../security/get-app-privileges.asciidoc | 0 .../security/get-builtin-privileges.asciidoc | 0 .../security/get-role-mappings.asciidoc | 0 .../rest-api/security/get-roles.asciidoc | 0 .../security/get-service-accounts.asciidoc | 0 .../security/get-service-credentials.asciidoc | 0 .../rest-api/security/get-settings.asciidoc | 0 .../rest-api/security/get-tokens.asciidoc | 0 .../security/get-user-privileges.asciidoc | 0 .../security/get-user-profile.asciidoc | 0 .../rest-api/security/get-users.asciidoc | 0 .../rest-api/security/grant-api-keys.asciidoc | 0 .../has-privileges-user-profile.asciidoc | 0 .../rest-api/security/has-privileges.asciidoc | 0 .../security/invalidate-api-keys.asciidoc | 0 .../security/invalidate-tokens.asciidoc | 0 .../security/oidc-authenticate-api.asciidoc | 0 .../security/oidc-logout-api.asciidoc | 0 .../oidc-prepare-authentication-api.asciidoc | 0 
.../security/put-app-privileges.asciidoc | 0 .../rest-api/security/query-api-key.asciidoc | 0 .../security/role-mapping-resources.asciidoc | 0 .../security/saml-authenticate-api.asciidoc | 0 .../saml-complete-logout-api.asciidoc | 0 .../security/saml-invalidate-api.asciidoc | 0 .../security/saml-logout-api.asciidoc | 0 .../saml-prepare-authentication-api.asciidoc | 0 .../security/saml-sp-metadata.asciidoc | 0 .../reference}/rest-api/security/ssl.asciidoc | 0 .../security/suggest-user-profile.asciidoc | 0 .../rest-api/security/update-api-key.asciidoc | 0 .../update-cross-cluster-api-key.asciidoc | 0 .../security/update-settings.asciidoc | 0 .../update-user-profile-data.asciidoc | 0 docs/reference/rest-api/usage.asciidoc | 6 +- .../reference}/rest-api/watcher.asciidoc | 0 .../rest-api/watcher/ack-watch.asciidoc | 0 .../rest-api/watcher/activate-watch.asciidoc | 0 .../watcher/deactivate-watch.asciidoc | 0 .../rest-api/watcher/delete-watch.asciidoc | 0 .../rest-api/watcher/execute-watch.asciidoc | 0 .../rest-api/watcher/get-settings.asciidoc | 0 .../rest-api/watcher/get-watch.asciidoc | 0 .../rest-api/watcher/put-watch.asciidoc | 0 .../rest-api/watcher/query-watches.asciidoc | 0 .../rest-api/watcher/start.asciidoc | 0 .../rest-api/watcher/stats.asciidoc | 0 .../reference}/rest-api/watcher/stop.asciidoc | 0 .../rest-api/watcher/update-settings.asciidoc | 0 .../auditing/auditing-search-queries.asciidoc | 0 .../auditing/enable-audit-logging.asciidoc | 0 .../security/auditing/event-types.asciidoc | 0 .../security/auditing/ignore-policy.asciidoc | 0 .../security/auditing/index.asciidoc | 0 .../security/auditing/output-logfile.asciidoc | 0 .../active-directory-realm.asciidoc | 0 .../authentication/anonymous-access.asciidoc | 0 .../authentication/built-in-users.asciidoc | 0 ...onfiguring-active-directory-realm.asciidoc | 0 .../configuring-file-realm.asciidoc | 0 .../configuring-kerberos-realm.asciidoc | 0 .../configuring-ldap-realm.asciidoc | 0 
.../configuring-native-realm.asciidoc | 0 .../configuring-pki-realm.asciidoc | 0 .../authentication/custom-realm.asciidoc | 0 .../authentication/file-realm.asciidoc | 0 .../authentication/internal-users.asciidoc | 0 .../authentication/jwt-realm.asciidoc | 0 .../authentication/kerberos-realm.asciidoc | 0 .../authentication/ldap-realm.asciidoc | 0 .../authentication/native-realm.asciidoc | 0 .../authentication/oidc-guide.asciidoc | 0 .../authentication/oidc-realm.asciidoc | 0 .../security/authentication/overview.asciidoc | 0 .../authentication/pki-realm.asciidoc | 0 .../authentication/realm-chains.asciidoc | 0 .../security/authentication/realms.asciidoc | 0 ...emote-clusters-privileges-api-key.asciidoc | 0 .../remote-clusters-privileges-cert.asciidoc | 0 .../authentication/saml-guide.asciidoc | 0 .../authentication/saml-realm.asciidoc | 0 .../authentication/security-domain.asciidoc | 0 .../authentication/service-accounts.asciidoc | 0 .../token-authentication-services.asciidoc | 0 .../authentication/user-cache.asciidoc | 0 .../authentication/user-lookup.asciidoc | 0 .../authentication/user-profile.asciidoc | 0 .../authorization/alias-privileges.asciidoc | 0 .../authorization/built-in-roles.asciidoc | 0 ...figuring-authorization-delegation.asciidoc | 0 .../custom-authorization.asciidoc | 0 .../document-level-security.asciidoc | 0 ...field-and-document-access-control.asciidoc | 0 .../field-level-security.asciidoc | 0 .../authorization/images/authorization.png | Bin .../authorization/managing-roles.asciidoc | 0 .../authorization/mapping-roles.asciidoc | 0 .../security/authorization/overview.asciidoc | 0 .../authorization/privileges.asciidoc | 0 .../authorization/role-restriction.asciidoc | 0 .../authorization/role-templates.asciidoc | 0 .../authorization/run-as-privilege.asciidoc | 0 .../authorization/set-security-user.asciidoc | 0 .../ccs-clients-integrations/hadoop.asciidoc | 0 .../ccs-clients-integrations/http.asciidoc | 0 .../ccs-clients-integrations/index.asciidoc | 
0 .../monitoring.asciidoc | 0 .../configuring-stack-security.asciidoc | 0 .../reference}/security/enroll-nodes.asciidoc | 0 .../security/es-security-principles.asciidoc | 0 .../security/fips-140-compliance.asciidoc | 0 .../reference}/security/fips-java17.asciidoc | 0 .../security/images/assign-role.jpg | Bin .../security/images/create-logstash-user.jpg | Bin .../security/images/create-reader-role.jpg | Bin .../security/images/create-user.jpg | Bin .../security/images/create-writer-role.jpg | Bin .../images/elastic-security-overview.png | Bin .../security/images/kibana-login.jpg | Bin .../images/management-builtin-users.jpg | Bin .../security/images/management-roles.jpg | Bin .../security/images/management-users.jpg | Bin .../reference}/security/images/nexus.png | Bin .../reference}/security/index.asciidoc | 0 .../reference}/security/limitations.asciidoc | 0 .../configure-operator-privileges.asciidoc | 0 .../operator-privileges/index.asciidoc | 0 .../operator-only-functionality.asciidoc | 0 ...perator-only-snapshot-and-restore.asciidoc | 0 .../security/reference/files.asciidoc | 0 .../change-passwords-native-users.asciidoc | 0 .../enabling-cipher-suites.asciidoc | 0 .../security-basic-setup-https.asciidoc | 0 .../security-basic-setup.asciidoc | 0 .../security-minimal-setup.asciidoc | 0 .../securing-communications/tls-ad.asciidoc | 0 .../securing-communications/tls-http.asciidoc | 0 .../securing-communications/tls-ldap.asciidoc | 0 .../tls-versions-jdk.asciidoc | 0 .../update-tls-certificates.asciidoc | 0 .../security-manual-configuration.asciidoc | 0 .../security/troubleshooting.asciidoc | 0 .../security/using-ip-filtering.asciidoc | 0 docs/reference/setup/add-nodes.asciidoc | 2 +- .../setup/install/targz-start.asciidoc | 2 +- .../setup/install/zip-windows-start.asciidoc | 2 +- docs/reference/sql/security.asciidoc | 2 +- .../transform/apis/update-transform.asciidoc | 2 +- docs/reference/troubleshooting.asciidoc | 2 +- docs/reference/upgrade.asciidoc | 2 +- 
.../reference}/watcher/actions.asciidoc | 0 .../reference}/watcher/actions/email.asciidoc | 0 .../reference}/watcher/actions/index.asciidoc | 0 .../reference}/watcher/actions/jira.asciidoc | 0 .../watcher/actions/logging.asciidoc | 0 .../watcher/actions/pagerduty.asciidoc | 0 .../reference}/watcher/actions/slack.asciidoc | 0 .../watcher/actions/webhook.asciidoc | 0 .../reference}/watcher/condition.asciidoc | 0 .../watcher/condition/always.asciidoc | 0 .../watcher/condition/array-compare.asciidoc | 0 .../watcher/condition/compare.asciidoc | 0 .../watcher/condition/never.asciidoc | 0 .../watcher/condition/script.asciidoc | 0 .../watcher/customizing-watches.asciidoc | 0 .../watcher/encrypting-data.asciidoc | 0 .../watcher/example-watches.asciidoc | 0 .../example-watch-clusterstatus.asciidoc | 0 .../watching-time-series-data.asciidoc | 0 .../watcher/getting-started.asciidoc | 0 .../watcher/how-watcher-works.asciidoc | 0 .../watcher/images/action-throttling.jpg | Bin .../images/slack-add-webhook-integration.jpg | Bin .../watcher/images/slack-copy-webhook-url.jpg | Bin .../watcher/images/watch-execution.jpg | Bin .../images/watcher-kibana-dashboard.png | Bin .../reference}/watcher/images/watcher.graffle | Bin .../reference}/watcher/index.asciidoc | 0 .../reference}/watcher/input.asciidoc | 0 .../reference}/watcher/input/chain.asciidoc | 0 .../reference}/watcher/input/http.asciidoc | 0 .../reference}/watcher/input/search.asciidoc | 0 .../reference}/watcher/input/simple.asciidoc | 0 .../watcher/java/ack-watch.asciidoc | 0 .../watcher/java/activate-watch.asciidoc | 0 .../watcher/java/deactivate-watch.asciidoc | 0 .../watcher/java/delete-watch.asciidoc | 0 .../watcher/java/execute-watch.asciidoc | 0 .../watcher/java/get-watch.asciidoc | 0 .../watcher/java/put-watch.asciidoc | 0 .../reference}/watcher/java/service.asciidoc | 0 .../reference}/watcher/java/stats.asciidoc | 0 .../reference}/watcher/limitations.asciidoc | 0 .../watcher/managing-watches.asciidoc | 0 
.../reference}/watcher/transform.asciidoc | 0 .../watcher/transform/chain.asciidoc | 0 .../watcher/transform/script.asciidoc | 0 .../watcher/transform/search.asciidoc | 0 .../reference}/watcher/trigger.asciidoc | 0 .../watcher/trigger/schedule.asciidoc | 0 .../watcher/trigger/schedule/cron.asciidoc | 0 .../watcher/trigger/schedule/daily.asciidoc | 0 .../watcher/trigger/schedule/hourly.asciidoc | 0 .../trigger/schedule/interval.asciidoc | 0 .../watcher/trigger/schedule/monthly.asciidoc | 0 .../watcher/trigger/schedule/weekly.asciidoc | 0 .../watcher/trigger/schedule/yearly.asciidoc | 0 .../watcher/troubleshooting.asciidoc | 0 .../smoketest/DocsClientYamlTestSuiteIT.java | 73 +++++ x-pack/docs/build.gradle | 20 +- 255 files changed, 367 insertions(+), 40 deletions(-) rename {x-pack/docs/en => docs/reference}/rest-api/logstash/delete-pipeline.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/logstash/get-pipeline.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/logstash/index.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/logstash/put-pipeline.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/activate-user-profile.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/authenticate.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/bulk-update-api-keys.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/change-password.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/clear-api-key-cache.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/clear-cache.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/clear-privileges-cache.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/clear-roles-cache.asciidoc (100%) rename {x-pack/docs/en => 
docs/reference}/rest-api/security/clear-service-token-caches.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/create-api-keys.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/create-cross-cluster-api-key.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/create-role-mappings.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/create-roles.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/create-service-token.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/create-users.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/delegate-pki-authentication.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/delete-app-privileges.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/delete-role-mappings.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/delete-roles.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/delete-service-token.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/delete-users.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/disable-user-profile.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/disable-users.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/enable-user-profile.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/enable-users.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/enroll-kibana.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/enroll-node.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-api-keys.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-app-privileges.asciidoc (100%) rename {x-pack/docs/en => 
docs/reference}/rest-api/security/get-builtin-privileges.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-role-mappings.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-roles.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-service-accounts.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-service-credentials.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-settings.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-tokens.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-user-privileges.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-user-profile.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/get-users.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/grant-api-keys.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/has-privileges-user-profile.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/has-privileges.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/invalidate-api-keys.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/invalidate-tokens.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/oidc-authenticate-api.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/oidc-logout-api.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/oidc-prepare-authentication-api.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/put-app-privileges.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/query-api-key.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/role-mapping-resources.asciidoc (100%) rename {x-pack/docs/en => 
docs/reference}/rest-api/security/saml-authenticate-api.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/saml-complete-logout-api.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/saml-invalidate-api.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/saml-logout-api.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/saml-prepare-authentication-api.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/saml-sp-metadata.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/ssl.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/suggest-user-profile.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/update-api-key.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/update-cross-cluster-api-key.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/update-settings.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/security/update-user-profile-data.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/ack-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/activate-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/deactivate-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/delete-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/execute-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/get-settings.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/get-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/put-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/query-watches.asciidoc (100%) 
rename {x-pack/docs/en => docs/reference}/rest-api/watcher/start.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/stats.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/stop.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/rest-api/watcher/update-settings.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/auditing/auditing-search-queries.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/auditing/enable-audit-logging.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/auditing/event-types.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/auditing/ignore-policy.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/auditing/index.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/auditing/output-logfile.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/active-directory-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/anonymous-access.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/built-in-users.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/configuring-active-directory-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/configuring-file-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/configuring-kerberos-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/configuring-ldap-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/configuring-native-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/configuring-pki-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/custom-realm.asciidoc (100%) rename {x-pack/docs/en => 
docs/reference}/security/authentication/file-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/internal-users.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/jwt-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/kerberos-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/ldap-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/native-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/oidc-guide.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/oidc-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/overview.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/pki-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/realm-chains.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/realms.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/remote-clusters-privileges-api-key.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/remote-clusters-privileges-cert.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/saml-guide.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/saml-realm.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/security-domain.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/service-accounts.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/token-authentication-services.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/user-cache.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authentication/user-lookup.asciidoc 
(100%) rename {x-pack/docs/en => docs/reference}/security/authentication/user-profile.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/alias-privileges.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/built-in-roles.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/configuring-authorization-delegation.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/custom-authorization.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/document-level-security.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/field-and-document-access-control.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/field-level-security.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/images/authorization.png (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/managing-roles.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/mapping-roles.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/overview.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/privileges.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/role-restriction.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/role-templates.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/run-as-privilege.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/authorization/set-security-user.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/ccs-clients-integrations/hadoop.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/ccs-clients-integrations/http.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/ccs-clients-integrations/index.asciidoc (100%) rename 
{x-pack/docs/en => docs/reference}/security/ccs-clients-integrations/monitoring.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/configuring-stack-security.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/enroll-nodes.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/es-security-principles.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/fips-140-compliance.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/fips-java17.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/images/assign-role.jpg (100%) rename {x-pack/docs/en => docs/reference}/security/images/create-logstash-user.jpg (100%) rename {x-pack/docs/en => docs/reference}/security/images/create-reader-role.jpg (100%) rename {x-pack/docs/en => docs/reference}/security/images/create-user.jpg (100%) rename {x-pack/docs/en => docs/reference}/security/images/create-writer-role.jpg (100%) rename {x-pack/docs/en => docs/reference}/security/images/elastic-security-overview.png (100%) rename {x-pack/docs/en => docs/reference}/security/images/kibana-login.jpg (100%) rename {x-pack/docs/en => docs/reference}/security/images/management-builtin-users.jpg (100%) rename {x-pack/docs/en => docs/reference}/security/images/management-roles.jpg (100%) rename {x-pack/docs/en => docs/reference}/security/images/management-users.jpg (100%) rename {x-pack/docs/en => docs/reference}/security/images/nexus.png (100%) rename {x-pack/docs/en => docs/reference}/security/index.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/limitations.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/operator-privileges/configure-operator-privileges.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/operator-privileges/index.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/operator-privileges/operator-only-functionality.asciidoc (100%) rename {x-pack/docs/en => 
docs/reference}/security/operator-privileges/operator-only-snapshot-and-restore.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/reference/files.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/change-passwords-native-users.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/enabling-cipher-suites.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/security-basic-setup-https.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/security-basic-setup.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/security-minimal-setup.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/tls-ad.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/tls-http.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/tls-ldap.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/tls-versions-jdk.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/securing-communications/update-tls-certificates.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/security-manual-configuration.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/troubleshooting.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/security/using-ip-filtering.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/actions.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/actions/email.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/actions/index.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/actions/jira.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/actions/logging.asciidoc (100%) rename {x-pack/docs/en => 
docs/reference}/watcher/actions/pagerduty.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/actions/slack.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/actions/webhook.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/condition.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/condition/always.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/condition/array-compare.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/condition/compare.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/condition/never.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/condition/script.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/customizing-watches.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/encrypting-data.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/example-watches.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/example-watches/example-watch-clusterstatus.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/example-watches/watching-time-series-data.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/getting-started.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/how-watcher-works.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/images/action-throttling.jpg (100%) rename {x-pack/docs/en => docs/reference}/watcher/images/slack-add-webhook-integration.jpg (100%) rename {x-pack/docs/en => docs/reference}/watcher/images/slack-copy-webhook-url.jpg (100%) rename {x-pack/docs/en => docs/reference}/watcher/images/watch-execution.jpg (100%) rename {x-pack/docs/en => docs/reference}/watcher/images/watcher-kibana-dashboard.png (100%) rename {x-pack/docs/en => docs/reference}/watcher/images/watcher.graffle (100%) rename {x-pack/docs/en => docs/reference}/watcher/index.asciidoc (100%) rename {x-pack/docs/en => 
docs/reference}/watcher/input.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/input/chain.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/input/http.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/input/search.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/input/simple.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/java/ack-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/java/activate-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/java/deactivate-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/java/delete-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/java/execute-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/java/get-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/java/put-watch.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/java/service.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/java/stats.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/limitations.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/managing-watches.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/transform.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/transform/chain.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/transform/script.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/transform/search.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/trigger.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/trigger/schedule.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/trigger/schedule/cron.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/trigger/schedule/daily.asciidoc (100%) rename {x-pack/docs/en => 
docs/reference}/watcher/trigger/schedule/hourly.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/trigger/schedule/interval.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/trigger/schedule/monthly.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/trigger/schedule/weekly.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/trigger/schedule/yearly.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/watcher/troubleshooting.asciidoc (100%) diff --git a/docs/build.gradle b/docs/build.gradle index 446bb85e2ac16..33e6cc6080a95 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -24,6 +24,9 @@ ext.docsFileTree = fileTree(projectDir) { exclude 'README.asciidoc' // Broken code snippet tests exclude 'reference/graph/explore.asciidoc' + // These files simply don't pass yet. We should figure out how to fix them. + exclude 'reference/watcher/reference/actions.asciidoc' + exclude 'reference/rest-api/security/ssl.asciidoc' if (BuildParams.inFipsJvm) { // We don't support this component in FIPS 140 exclude 'reference/ingest/processors/attachment.asciidoc' @@ -48,7 +51,13 @@ tasks.named("buildRestTests").configure { 'reference/ml/anomaly-detection/apis/post-data.asciidoc', 'reference/ml/anomaly-detection/apis/revert-snapshot.asciidoc', 'reference/ml/anomaly-detection/apis/update-snapshot.asciidoc', - 'reference/ml/anomaly-detection/apis/update-job.asciidoc' + 'reference/ml/anomaly-detection/apis/update-job.asciidoc', + 'reference/security/authentication/user-cache.asciidoc', + 'reference/security/authorization/run-as-privilege.asciidoc', + 'reference/security/ccs-clients-integrations/http.asciidoc', + 'reference/rest-api/watcher/put-watch.asciidoc', + 'reference/rest-api/watcher/stats.asciidoc', + 'reference/watcher/example-watches/watching-time-series-data.asciidoc' ] } @@ -74,7 +83,6 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { // enable regexes in painless so our tests don't 
complain about example snippets that use them setting 'script.painless.regex.enabled', 'true' - setting 'xpack.security.enabled', 'false' setting 'path.repo', "${buildDir}/cluster/shared/repo" Closure configFile = { extraConfigFile it, file("src/test/cluster/config/$it") @@ -101,6 +109,42 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + extraConfigFile 'op-jwks.json', project(':x-pack:test:idp-fixture').file("oidc/op-jwks.json") + extraConfigFile 'idp-docs-metadata.xml', project(':x-pack:test:idp-fixture').file("idp/shibboleth-idp/metadata/idp-docs-metadata.xml") + extraConfigFile 'testClient.crt', project(':x-pack:plugin:security').file("src/test/resources/org/elasticsearch/xpack/security/action/pki_delegation/testClient.crt") + setting 'xpack.security.enabled', 'true' + setting 'xpack.security.authc.api_key.enabled', 'true' + setting 'xpack.security.authc.token.enabled', 'true' + // disable the ILM history for doc tests to avoid potential lingering tasks that'd cause test flakiness + setting 'indices.lifecycle.history_index_enabled', 'false' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.authc.realms.file.file.order', '0' + setting 'xpack.security.authc.realms.native.native.order', '1' + setting 'xpack.security.authc.realms.oidc.oidc1.order', '2' + setting 'xpack.security.authc.realms.oidc.oidc1.op.issuer', 'http://127.0.0.1:8080' + setting 'xpack.security.authc.realms.oidc.oidc1.op.authorization_endpoint', "http://127.0.0.1:8080/c2id-login" + setting 'xpack.security.authc.realms.oidc.oidc1.op.token_endpoint', "http://127.0.0.1:8080/c2id/token" + setting 'xpack.security.authc.realms.oidc.oidc1.op.jwkset_path', 'op-jwks.json' + setting 'xpack.security.authc.realms.oidc.oidc1.rp.redirect_uri', 'https://my.fantastic.rp/cb' + setting 'xpack.security.authc.realms.oidc.oidc1.rp.client_id', 'elasticsearch-rp' + keystore 
'xpack.security.authc.realms.oidc.oidc1.rp.client_secret', 'b07efb7a1cf6ec9462afe7b6d3ab55c6c7880262aa61ac28dded292aca47c9a2' + setting 'xpack.security.authc.realms.oidc.oidc1.rp.response_type', 'id_token' + setting 'xpack.security.authc.realms.oidc.oidc1.claims.principal', 'sub' + setting 'xpack.security.authc.realms.pki.pki1.order', '3' + setting 'xpack.security.authc.realms.pki.pki1.certificate_authorities', '[ "testClient.crt" ]' + setting 'xpack.security.authc.realms.pki.pki1.delegation.enabled', 'true' + setting 'xpack.security.authc.realms.saml.saml1.order', '4' + setting 'xpack.security.authc.realms.saml.saml1.sp.logout', 'https://kibana.org/logout' + setting 'xpack.security.authc.realms.saml.saml1.idp.entity_id', 'https://my-idp.org' + setting 'xpack.security.authc.realms.saml.saml1.idp.metadata.path', 'idp-docs-metadata.xml' + setting 'xpack.security.authc.realms.saml.saml1.sp.entity_id', 'https://kibana.org' + setting 'xpack.security.authc.realms.saml.saml1.sp.acs', 'https://kibana.org/api/security/saml/callback' + setting 'xpack.security.authc.realms.saml.saml1.attributes.principal', 'uid' + setting 'xpack.security.authc.realms.saml.saml1.attributes.name', 'urn:oid:2.5.4.3' + + user username: 'test_admin' + user username: 'test_user' + // build the cluster with all plugins project.rootProject.subprojects.findAll { it.parent.path == ':plugins' }.each { subproj -> /* Skip repositories. 
We just aren't going to be able to test them so it @@ -1653,6 +1697,210 @@ setups['setup-snapshots'] = setups['setup-repository'] + ''' body: | #atomic_red_data# ''' + + setups['my_inactive_watch'] = ''' + - do: + watcher.put_watch: + id: "my_watch" + active: false + body: > + { + "trigger": { + "schedule": { + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "payload": { + "send": "yes" + } + } + }, + "condition": { + "always": {} + }, + "actions": { + "test_index": { + "index": { + "index": "test" + } + } + } + } + - match: { _id: "my_watch" } +''' + + setups['my_active_watch'] = setups['my_inactive_watch'].replace( + 'active: false', 'active: true') + + setups['role_mapping'] = ''' + - do: + security.put_role_mapping: + name: "mapping1" + body: > + { + "enabled": true, + "roles": [ "user" ], + "rules": { "field": { "username": "*" } } + } +''' + + setups['admin_role'] = ''' + - do: + security.put_role: + name: "my_admin_role" + body: > + { + "cluster": ["all"], + "indices": [ + {"names": ["index1", "index2" ], "privileges": ["all"], "field_security" : {"grant" : [ "title", "body" ]}} + ], + "run_as": [ "other_user" ], + "metadata" : {"version": 1} + } +''' + setups['jacknich_user'] = ''' + - do: + security.put_user: + username: "jacknich" + body: > + { + "password" : "l0ng-r4nd0m-p@ssw0rd", + "roles" : [ "admin", "other_role1" ], + "full_name" : "Jack Nicholson", + "email" : "jacknich@example.com", + "metadata" : { "intelligence" : 7 } + } + - do: + security.activate_user_profile: + body: > + { + "grant_type": "password", + "username": "jacknich", + "password" : "l0ng-r4nd0m-p@ssw0rd" + } +''' + setups['app0102_privileges'] = ''' + - do: + security.put_privileges: + body: > + { + "myapp": { + "read": { + "application": "myapp", + "name": "read", + "actions": [ + "data:read/*", + "action:login" ], + "metadata": { + "description": "Read access to myapp" + } + } + } + } +''' + setups['service_token42'] = ''' + - do: + 
security.create_service_token: + namespace: elastic + service: fleet-server + name: token42 +''' + setups['user_profiles'] = ''' + - do: + security.put_user: + username: "jacknich" + body: > + { + "password" : "l0ng-r4nd0m-p@ssw0rd", + "roles" : [ "admin", "other_role1" ], + "full_name" : "Jack Nicholson", + "email" : "jacknich@example.com" + } + - do: + security.put_user: + username: "jackrea" + body: > + { + "password" : "l0ng-r4nd0m-p@ssw0rd", + "roles" : [ "admin" ], + "full_name" : "Jack Reacher", + "email" : "jackrea@example.com" + } + - do: + security.put_user: + username: "jackspa" + body: > + { + "password" : "l0ng-r4nd0m-p@ssw0rd", + "roles" : [ "user" ], + "full_name" : "Jack Sparrow", + "email" : "jackspa@example.com" + } + - do: + security.activate_user_profile: + body: > + { + "grant_type": "password", + "username": "jacknich", + "password" : "l0ng-r4nd0m-p@ssw0rd" + } + - do: + security.activate_user_profile: + body: > + { + "grant_type": "password", + "username": "jackrea", + "password" : "l0ng-r4nd0m-p@ssw0rd" + } + - do: + security.activate_user_profile: + body: > + { + "grant_type": "password", + "username": "jackspa", + "password" : "l0ng-r4nd0m-p@ssw0rd" + } + # jacknich + - do: + security.update_user_profile_data: + uid: "u_79HkWkwmnBH5gqFKwoxggWPjEBOur1zLPXQPEl1VBW0_0" + body: > + { + "labels": { + "direction": "north" + }, + "data": { + "app1": { + "key1": "value1" + } + } + } + # jackrea + - do: + security.update_user_profile_data: + uid: "u_P_0BMHgaOK3p7k-PFWUCbw9dQ-UFjt01oWJ_Dp2PmPc_0" + body: > + { + "labels": { + "direction": "west" + } + } + # jackspa + - do: + security.update_user_profile_data: + uid: "u_8RKO7AKfEbSiIHZkZZ2LJy2MUSDPWDr3tMI_CkIGApU_0" + body: > + { + "labels": { + "direction": "south" + } + } +''' + // fake data used by the correlation bucket agg buildRestTests.setups['correlate_latency'] = ''' - do: diff --git a/docs/reference/ccr/getting-started.asciidoc b/docs/reference/ccr/getting-started.asciidoc index 
837268fd9eb23..d6c455b510dad 100644 --- a/docs/reference/ccr/getting-started.asciidoc +++ b/docs/reference/ccr/getting-started.asciidoc @@ -159,7 +159,7 @@ cluster with cluster alias `leader`. connected to. ==== -include::../../../x-pack/docs/en/security/authentication/remote-clusters-privileges-cert.asciidoc[tag=configure-ccr-privileges] +include::{es-repo-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[tag=configure-ccr-privileges] [[ccr-getting-started-follower-index]] ==== Create a follower index to replicate a specific index diff --git a/docs/reference/data-streams/set-up-a-data-stream.asciidoc b/docs/reference/data-streams/set-up-a-data-stream.asciidoc index ade63e2ea43d9..144146b897ef0 100644 --- a/docs/reference/data-streams/set-up-a-data-stream.asciidoc +++ b/docs/reference/data-streams/set-up-a-data-stream.asciidoc @@ -261,7 +261,7 @@ PUT _data_stream/my-data-stream [[secure-data-stream]] === Secure the data stream -include::{xes-repo-dir}/security/authorization/alias-privileges.asciidoc[tag=data-stream-security] +include::{es-repo-dir}/security/authorization/alias-privileges.asciidoc[tag=data-stream-security] For an example, see <>. diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 97d77cf91376c..48b055f4e0fa2 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -984,9 +984,9 @@ POST _reindex -------------------------------------------------- // TEST[setup:host] // TEST[s/^/PUT my-index-000001\n/] -// TEST[s/otherhost:9200",/\${host}"/] -// TEST[s/"username": "user",//] -// TEST[s/"password": "pass"//] +// TEST[s/otherhost:9200",/\${host}",/] +// TEST[s/"username": "user",/"username": "test_admin",/] +// TEST[s/"password": "pass"/"password": "x-pack-test-password"/] The `host` parameter must contain a scheme, host, port (e.g. `https://otherhost:9200`), and optional path (e.g. `https://otherhost:9200/proxy`). 
@@ -1025,7 +1025,7 @@ POST _reindex // TEST[setup:host] // TEST[s/^/PUT my-index-000001\n/] // TEST[s/otherhost:9200",/\${host}",/] -// TEST[s/API_KEY_VALUE" /\93116930-2ecb-4161-aa5e-4f3586c87ac6"/] +// TEST[s/"headers": \{[^}]*\}/"username": "test_admin", "password": "x-pack-test-password"/] Remote hosts have to be explicitly allowed in `elasticsearch.yml` using the `reindex.remote.whitelist` property. It can be set to a comma delimited list @@ -1064,7 +1064,8 @@ POST _reindex { "source": { "remote": { - "host": "http://otherhost:9200" + "host": "http://otherhost:9200", + ... }, "index": "source", "size": 10, @@ -1082,6 +1083,7 @@ POST _reindex // TEST[setup:host] // TEST[s/^/PUT source\n/] // TEST[s/otherhost:9200/\${host}/] +// TEST[s/\.\.\./"username": "test_admin", "password": "x-pack-test-password"/] It is also possible to set the socket read timeout on the remote connection with the `socket_timeout` field and the connection timeout with the @@ -1096,6 +1098,7 @@ POST _reindex "source": { "remote": { "host": "http://otherhost:9200", + ..., "socket_timeout": "1m", "connect_timeout": "10s" }, @@ -1114,6 +1117,7 @@ POST _reindex // TEST[setup:host] // TEST[s/^/PUT source\n/] // TEST[s/otherhost:9200/\${host}/] +// TEST[s/\.\.\.,/"username": "test_admin", "password": "x-pack-test-password",/] [[reindex-ssl]] ===== Configuring SSL parameters diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index e3d84c91c4d28..828a3e4d1d01d 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -63,9 +63,9 @@ include::high-availability.asciidoc[] include::snapshot-restore/index.asciidoc[] -include::{xes-repo-dir}/security/index.asciidoc[] +include::security/index.asciidoc[] -include::{xes-repo-dir}/watcher/index.asciidoc[] +include::watcher/index.asciidoc[] include::commands/index.asciidoc[] diff --git a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc index 
134cb98f50390..97120ff1873ae 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc @@ -536,4 +536,4 @@ The API returns the following results: ---- // TESTRESPONSE[s/"job_version" : "8.4.0"/"job_version" : $body.job_version/] // TESTRESPONSE[s/1656087283340/$body.$_path/] -// TESTRESPONSE[s/"authorization" : \{[^}]*\},//] +// TESTRESPONSE[s/"superuser"/"_es_test_root"/] diff --git a/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc index 2c3ee51b0d8c7..f11166f9c1a60 100644 --- a/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc @@ -704,7 +704,7 @@ The API returns the following result: ---- // TESTRESPONSE[s/1656364565517/$body.$_path/] // TESTRESPONSE[s/"version" : "8.4.0"/"version": $body.version/] -// TESTRESPONSE[s/"authorization" : \{[^}]*\},//] +// TESTRESPONSE[s/"superuser"/"_es_test_root"/] [[ml-put-dfanalytics-example-r]] @@ -777,7 +777,7 @@ The API returns the following result: ---- // TESTRESPONSE[s/1656364845151/$body.$_path/] // TESTRESPONSE[s/"version" : "8.4.0"/"version": $body.version/] -// TESTRESPONSE[s/"authorization" : \{[^}]*\},//] +// TESTRESPONSE[s/"superuser"/"_es_test_root"/] // TESTRESPONSE[s/-3578554885299300212/$body.$_path/] diff --git a/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc b/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc index 9451c8ba50aae..a8fb195e7728d 100644 --- a/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc @@ -192,4 +192,4 @@ remote cluster earlier. 
include::remote-clusters-connect.asciidoc[] :!trust-mechanism: -include::../../../../x-pack/docs/en/security/authentication/remote-clusters-privileges-api-key.asciidoc[leveloffset=+1] \ No newline at end of file +include::{es-repo-dir}/security/authentication/remote-clusters-privileges-api-key.asciidoc[leveloffset=+1] \ No newline at end of file diff --git a/docs/reference/modules/cluster/remote-clusters-cert.asciidoc b/docs/reference/modules/cluster/remote-clusters-cert.asciidoc index 36dbde331f484..11d71955cfe60 100644 --- a/docs/reference/modules/cluster/remote-clusters-cert.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-cert.asciidoc @@ -80,4 +80,4 @@ generate certificates for all nodes simplifies this task. include::remote-clusters-connect.asciidoc[] :!trust-mechanism: -include::../../../../x-pack/docs/en/security/authentication/remote-clusters-privileges-cert.asciidoc[leveloffset=+1] \ No newline at end of file +include::{es-repo-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[leveloffset=+1] \ No newline at end of file diff --git a/docs/reference/rest-api/defs.asciidoc b/docs/reference/rest-api/defs.asciidoc index 681ab746c193c..a80d4a42eb30d 100644 --- a/docs/reference/rest-api/defs.asciidoc +++ b/docs/reference/rest-api/defs.asciidoc @@ -8,4 +8,4 @@ to {security-features}. 
* <> -include::{xes-repo-dir}/rest-api/security/role-mapping-resources.asciidoc[] +include::{es-repo-dir}/rest-api/security/role-mapping-resources.asciidoc[] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 59555517e4971..1da39333db43e 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -77,7 +77,7 @@ include::{es-repo-dir}/ilm/apis/ilm-api.asciidoc[] include::{es-repo-dir}/ingest/apis/index.asciidoc[] include::info.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] -include::{xes-repo-dir}/rest-api/logstash/index.asciidoc[] +include::{es-repo-dir}/rest-api/logstash/index.asciidoc[] include::{es-repo-dir}/ml/common/apis/index.asciidoc[] include::{es-repo-dir}/ml/anomaly-detection/apis/index.asciidoc[] include::{es-repo-dir}/ml/df-analytics/apis/index.asciidoc[] @@ -92,12 +92,12 @@ include::{es-repo-dir}/scripting/apis/script-apis.asciidoc[] include::{es-repo-dir}/search.asciidoc[] include::{es-repo-dir}/search-application/apis/index.asciidoc[] include::{es-repo-dir}/searchable-snapshots/apis/searchable-snapshots-apis.asciidoc[] -include::{xes-repo-dir}/rest-api/security.asciidoc[] +include::{es-repo-dir}/rest-api/security.asciidoc[] include::{es-repo-dir}/snapshot-restore/apis/snapshot-restore-apis.asciidoc[] include::{es-repo-dir}/slm/apis/slm-api.asciidoc[] include::{es-repo-dir}/sql/apis/sql-apis.asciidoc[] include::{es-repo-dir}/synonyms/apis/synonyms-apis.asciidoc[] include::{es-repo-dir}/transform/apis/index.asciidoc[] include::usage.asciidoc[] -include::{xes-repo-dir}/rest-api/watcher.asciidoc[] +include::{es-repo-dir}/rest-api/watcher.asciidoc[] include::defs.asciidoc[] diff --git a/docs/reference/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc index 31daa86d2c423..ec424ca20d324 100644 --- a/docs/reference/rest-api/info.asciidoc +++ b/docs/reference/rest-api/info.asciidoc @@ -127,7 +127,7 @@ Example response: }, "security" : { 
"available" : true, - "enabled" : false + "enabled" : true }, "slm" : { "available" : true, diff --git a/x-pack/docs/en/rest-api/logstash/delete-pipeline.asciidoc b/docs/reference/rest-api/logstash/delete-pipeline.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/logstash/delete-pipeline.asciidoc rename to docs/reference/rest-api/logstash/delete-pipeline.asciidoc diff --git a/x-pack/docs/en/rest-api/logstash/get-pipeline.asciidoc b/docs/reference/rest-api/logstash/get-pipeline.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/logstash/get-pipeline.asciidoc rename to docs/reference/rest-api/logstash/get-pipeline.asciidoc diff --git a/x-pack/docs/en/rest-api/logstash/index.asciidoc b/docs/reference/rest-api/logstash/index.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/logstash/index.asciidoc rename to docs/reference/rest-api/logstash/index.asciidoc diff --git a/x-pack/docs/en/rest-api/logstash/put-pipeline.asciidoc b/docs/reference/rest-api/logstash/put-pipeline.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/logstash/put-pipeline.asciidoc rename to docs/reference/rest-api/logstash/put-pipeline.asciidoc diff --git a/x-pack/docs/en/rest-api/security.asciidoc b/docs/reference/rest-api/security.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security.asciidoc rename to docs/reference/rest-api/security.asciidoc diff --git a/x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc b/docs/reference/rest-api/security/activate-user-profile.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc rename to docs/reference/rest-api/security/activate-user-profile.asciidoc diff --git a/x-pack/docs/en/rest-api/security/authenticate.asciidoc b/docs/reference/rest-api/security/authenticate.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/authenticate.asciidoc rename to 
docs/reference/rest-api/security/authenticate.asciidoc diff --git a/x-pack/docs/en/rest-api/security/bulk-update-api-keys.asciidoc b/docs/reference/rest-api/security/bulk-update-api-keys.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/bulk-update-api-keys.asciidoc rename to docs/reference/rest-api/security/bulk-update-api-keys.asciidoc diff --git a/x-pack/docs/en/rest-api/security/change-password.asciidoc b/docs/reference/rest-api/security/change-password.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/change-password.asciidoc rename to docs/reference/rest-api/security/change-password.asciidoc diff --git a/x-pack/docs/en/rest-api/security/clear-api-key-cache.asciidoc b/docs/reference/rest-api/security/clear-api-key-cache.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/clear-api-key-cache.asciidoc rename to docs/reference/rest-api/security/clear-api-key-cache.asciidoc diff --git a/x-pack/docs/en/rest-api/security/clear-cache.asciidoc b/docs/reference/rest-api/security/clear-cache.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/clear-cache.asciidoc rename to docs/reference/rest-api/security/clear-cache.asciidoc diff --git a/x-pack/docs/en/rest-api/security/clear-privileges-cache.asciidoc b/docs/reference/rest-api/security/clear-privileges-cache.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/clear-privileges-cache.asciidoc rename to docs/reference/rest-api/security/clear-privileges-cache.asciidoc diff --git a/x-pack/docs/en/rest-api/security/clear-roles-cache.asciidoc b/docs/reference/rest-api/security/clear-roles-cache.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/clear-roles-cache.asciidoc rename to docs/reference/rest-api/security/clear-roles-cache.asciidoc diff --git a/x-pack/docs/en/rest-api/security/clear-service-token-caches.asciidoc 
b/docs/reference/rest-api/security/clear-service-token-caches.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/clear-service-token-caches.asciidoc rename to docs/reference/rest-api/security/clear-service-token-caches.asciidoc diff --git a/x-pack/docs/en/rest-api/security/create-api-keys.asciidoc b/docs/reference/rest-api/security/create-api-keys.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/create-api-keys.asciidoc rename to docs/reference/rest-api/security/create-api-keys.asciidoc diff --git a/x-pack/docs/en/rest-api/security/create-cross-cluster-api-key.asciidoc b/docs/reference/rest-api/security/create-cross-cluster-api-key.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/create-cross-cluster-api-key.asciidoc rename to docs/reference/rest-api/security/create-cross-cluster-api-key.asciidoc diff --git a/x-pack/docs/en/rest-api/security/create-role-mappings.asciidoc b/docs/reference/rest-api/security/create-role-mappings.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/create-role-mappings.asciidoc rename to docs/reference/rest-api/security/create-role-mappings.asciidoc diff --git a/x-pack/docs/en/rest-api/security/create-roles.asciidoc b/docs/reference/rest-api/security/create-roles.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/create-roles.asciidoc rename to docs/reference/rest-api/security/create-roles.asciidoc diff --git a/x-pack/docs/en/rest-api/security/create-service-token.asciidoc b/docs/reference/rest-api/security/create-service-token.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/create-service-token.asciidoc rename to docs/reference/rest-api/security/create-service-token.asciidoc diff --git a/x-pack/docs/en/rest-api/security/create-users.asciidoc b/docs/reference/rest-api/security/create-users.asciidoc similarity index 100% rename from 
x-pack/docs/en/rest-api/security/create-users.asciidoc rename to docs/reference/rest-api/security/create-users.asciidoc diff --git a/x-pack/docs/en/rest-api/security/delegate-pki-authentication.asciidoc b/docs/reference/rest-api/security/delegate-pki-authentication.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/delegate-pki-authentication.asciidoc rename to docs/reference/rest-api/security/delegate-pki-authentication.asciidoc diff --git a/x-pack/docs/en/rest-api/security/delete-app-privileges.asciidoc b/docs/reference/rest-api/security/delete-app-privileges.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/delete-app-privileges.asciidoc rename to docs/reference/rest-api/security/delete-app-privileges.asciidoc diff --git a/x-pack/docs/en/rest-api/security/delete-role-mappings.asciidoc b/docs/reference/rest-api/security/delete-role-mappings.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/delete-role-mappings.asciidoc rename to docs/reference/rest-api/security/delete-role-mappings.asciidoc diff --git a/x-pack/docs/en/rest-api/security/delete-roles.asciidoc b/docs/reference/rest-api/security/delete-roles.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/delete-roles.asciidoc rename to docs/reference/rest-api/security/delete-roles.asciidoc diff --git a/x-pack/docs/en/rest-api/security/delete-service-token.asciidoc b/docs/reference/rest-api/security/delete-service-token.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/delete-service-token.asciidoc rename to docs/reference/rest-api/security/delete-service-token.asciidoc diff --git a/x-pack/docs/en/rest-api/security/delete-users.asciidoc b/docs/reference/rest-api/security/delete-users.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/delete-users.asciidoc rename to docs/reference/rest-api/security/delete-users.asciidoc diff --git 
a/x-pack/docs/en/rest-api/security/disable-user-profile.asciidoc b/docs/reference/rest-api/security/disable-user-profile.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/disable-user-profile.asciidoc rename to docs/reference/rest-api/security/disable-user-profile.asciidoc diff --git a/x-pack/docs/en/rest-api/security/disable-users.asciidoc b/docs/reference/rest-api/security/disable-users.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/disable-users.asciidoc rename to docs/reference/rest-api/security/disable-users.asciidoc diff --git a/x-pack/docs/en/rest-api/security/enable-user-profile.asciidoc b/docs/reference/rest-api/security/enable-user-profile.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/enable-user-profile.asciidoc rename to docs/reference/rest-api/security/enable-user-profile.asciidoc diff --git a/x-pack/docs/en/rest-api/security/enable-users.asciidoc b/docs/reference/rest-api/security/enable-users.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/enable-users.asciidoc rename to docs/reference/rest-api/security/enable-users.asciidoc diff --git a/x-pack/docs/en/rest-api/security/enroll-kibana.asciidoc b/docs/reference/rest-api/security/enroll-kibana.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/enroll-kibana.asciidoc rename to docs/reference/rest-api/security/enroll-kibana.asciidoc diff --git a/x-pack/docs/en/rest-api/security/enroll-node.asciidoc b/docs/reference/rest-api/security/enroll-node.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/enroll-node.asciidoc rename to docs/reference/rest-api/security/enroll-node.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-api-keys.asciidoc b/docs/reference/rest-api/security/get-api-keys.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-api-keys.asciidoc rename to 
docs/reference/rest-api/security/get-api-keys.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-app-privileges.asciidoc b/docs/reference/rest-api/security/get-app-privileges.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-app-privileges.asciidoc rename to docs/reference/rest-api/security/get-app-privileges.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-builtin-privileges.asciidoc b/docs/reference/rest-api/security/get-builtin-privileges.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-builtin-privileges.asciidoc rename to docs/reference/rest-api/security/get-builtin-privileges.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-role-mappings.asciidoc b/docs/reference/rest-api/security/get-role-mappings.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-role-mappings.asciidoc rename to docs/reference/rest-api/security/get-role-mappings.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-roles.asciidoc b/docs/reference/rest-api/security/get-roles.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-roles.asciidoc rename to docs/reference/rest-api/security/get-roles.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-service-accounts.asciidoc b/docs/reference/rest-api/security/get-service-accounts.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-service-accounts.asciidoc rename to docs/reference/rest-api/security/get-service-accounts.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-service-credentials.asciidoc b/docs/reference/rest-api/security/get-service-credentials.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-service-credentials.asciidoc rename to docs/reference/rest-api/security/get-service-credentials.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-settings.asciidoc 
b/docs/reference/rest-api/security/get-settings.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-settings.asciidoc rename to docs/reference/rest-api/security/get-settings.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-tokens.asciidoc b/docs/reference/rest-api/security/get-tokens.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-tokens.asciidoc rename to docs/reference/rest-api/security/get-tokens.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-user-privileges.asciidoc b/docs/reference/rest-api/security/get-user-privileges.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-user-privileges.asciidoc rename to docs/reference/rest-api/security/get-user-privileges.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-user-profile.asciidoc b/docs/reference/rest-api/security/get-user-profile.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-user-profile.asciidoc rename to docs/reference/rest-api/security/get-user-profile.asciidoc diff --git a/x-pack/docs/en/rest-api/security/get-users.asciidoc b/docs/reference/rest-api/security/get-users.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/get-users.asciidoc rename to docs/reference/rest-api/security/get-users.asciidoc diff --git a/x-pack/docs/en/rest-api/security/grant-api-keys.asciidoc b/docs/reference/rest-api/security/grant-api-keys.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/grant-api-keys.asciidoc rename to docs/reference/rest-api/security/grant-api-keys.asciidoc diff --git a/x-pack/docs/en/rest-api/security/has-privileges-user-profile.asciidoc b/docs/reference/rest-api/security/has-privileges-user-profile.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/has-privileges-user-profile.asciidoc rename to docs/reference/rest-api/security/has-privileges-user-profile.asciidoc diff --git 
a/x-pack/docs/en/rest-api/security/has-privileges.asciidoc b/docs/reference/rest-api/security/has-privileges.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/has-privileges.asciidoc rename to docs/reference/rest-api/security/has-privileges.asciidoc diff --git a/x-pack/docs/en/rest-api/security/invalidate-api-keys.asciidoc b/docs/reference/rest-api/security/invalidate-api-keys.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/invalidate-api-keys.asciidoc rename to docs/reference/rest-api/security/invalidate-api-keys.asciidoc diff --git a/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc b/docs/reference/rest-api/security/invalidate-tokens.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc rename to docs/reference/rest-api/security/invalidate-tokens.asciidoc diff --git a/x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc b/docs/reference/rest-api/security/oidc-authenticate-api.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc rename to docs/reference/rest-api/security/oidc-authenticate-api.asciidoc diff --git a/x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc b/docs/reference/rest-api/security/oidc-logout-api.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc rename to docs/reference/rest-api/security/oidc-logout-api.asciidoc diff --git a/x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc b/docs/reference/rest-api/security/oidc-prepare-authentication-api.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc rename to docs/reference/rest-api/security/oidc-prepare-authentication-api.asciidoc diff --git a/x-pack/docs/en/rest-api/security/put-app-privileges.asciidoc b/docs/reference/rest-api/security/put-app-privileges.asciidoc similarity index 
100% rename from x-pack/docs/en/rest-api/security/put-app-privileges.asciidoc rename to docs/reference/rest-api/security/put-app-privileges.asciidoc diff --git a/x-pack/docs/en/rest-api/security/query-api-key.asciidoc b/docs/reference/rest-api/security/query-api-key.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/query-api-key.asciidoc rename to docs/reference/rest-api/security/query-api-key.asciidoc diff --git a/x-pack/docs/en/rest-api/security/role-mapping-resources.asciidoc b/docs/reference/rest-api/security/role-mapping-resources.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/role-mapping-resources.asciidoc rename to docs/reference/rest-api/security/role-mapping-resources.asciidoc diff --git a/x-pack/docs/en/rest-api/security/saml-authenticate-api.asciidoc b/docs/reference/rest-api/security/saml-authenticate-api.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/saml-authenticate-api.asciidoc rename to docs/reference/rest-api/security/saml-authenticate-api.asciidoc diff --git a/x-pack/docs/en/rest-api/security/saml-complete-logout-api.asciidoc b/docs/reference/rest-api/security/saml-complete-logout-api.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/saml-complete-logout-api.asciidoc rename to docs/reference/rest-api/security/saml-complete-logout-api.asciidoc diff --git a/x-pack/docs/en/rest-api/security/saml-invalidate-api.asciidoc b/docs/reference/rest-api/security/saml-invalidate-api.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/saml-invalidate-api.asciidoc rename to docs/reference/rest-api/security/saml-invalidate-api.asciidoc diff --git a/x-pack/docs/en/rest-api/security/saml-logout-api.asciidoc b/docs/reference/rest-api/security/saml-logout-api.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/saml-logout-api.asciidoc rename to docs/reference/rest-api/security/saml-logout-api.asciidoc diff 
--git a/x-pack/docs/en/rest-api/security/saml-prepare-authentication-api.asciidoc b/docs/reference/rest-api/security/saml-prepare-authentication-api.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/saml-prepare-authentication-api.asciidoc rename to docs/reference/rest-api/security/saml-prepare-authentication-api.asciidoc diff --git a/x-pack/docs/en/rest-api/security/saml-sp-metadata.asciidoc b/docs/reference/rest-api/security/saml-sp-metadata.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/saml-sp-metadata.asciidoc rename to docs/reference/rest-api/security/saml-sp-metadata.asciidoc diff --git a/x-pack/docs/en/rest-api/security/ssl.asciidoc b/docs/reference/rest-api/security/ssl.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/ssl.asciidoc rename to docs/reference/rest-api/security/ssl.asciidoc diff --git a/x-pack/docs/en/rest-api/security/suggest-user-profile.asciidoc b/docs/reference/rest-api/security/suggest-user-profile.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/suggest-user-profile.asciidoc rename to docs/reference/rest-api/security/suggest-user-profile.asciidoc diff --git a/x-pack/docs/en/rest-api/security/update-api-key.asciidoc b/docs/reference/rest-api/security/update-api-key.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/update-api-key.asciidoc rename to docs/reference/rest-api/security/update-api-key.asciidoc diff --git a/x-pack/docs/en/rest-api/security/update-cross-cluster-api-key.asciidoc b/docs/reference/rest-api/security/update-cross-cluster-api-key.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/update-cross-cluster-api-key.asciidoc rename to docs/reference/rest-api/security/update-cross-cluster-api-key.asciidoc diff --git a/x-pack/docs/en/rest-api/security/update-settings.asciidoc b/docs/reference/rest-api/security/update-settings.asciidoc similarity index 100% rename from 
x-pack/docs/en/rest-api/security/update-settings.asciidoc rename to docs/reference/rest-api/security/update-settings.asciidoc diff --git a/x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc b/docs/reference/rest-api/security/update-user-profile-data.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc rename to docs/reference/rest-api/security/update-user-profile-data.asciidoc diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 14a92eca519f4..9594669b6d41f 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -38,7 +38,7 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] ------------------------------------------------------------ GET /_xpack/usage ------------------------------------------------------------ -// TEST[s/usage/usage?filter_path=-watcher.execution.actions.logging*/] +// TEST[s/usage/usage?filter_path=-watcher.execution.actions.index*\,-watcher.execution.actions.logging*/] // This response filter removes watcher logging results if they are included // to avoid errors in the CI builds. @@ -47,7 +47,8 @@ GET /_xpack/usage { "security" : { "available" : true, - "enabled" : false + "enabled" : true, + ... 
}, "monitoring" : { "available" : true, @@ -475,6 +476,7 @@ GET /_xpack/usage } } ------------------------------------------------------------ +// TESTRESPONSE[s/"security" : \{[^\}]*\},/"security" : $body.$_path,/] // TESTRESPONSE[s/"detectors" : \{[^\}]*\},/"detectors" : $body.$_path,/] // TESTRESPONSE[s/"model_size" : \{[^\}]*\},/"model_size" : $body.$_path,/] // TESTRESPONSE[s/"eql" : \{[^\}]*\},/"eql" : $body.$_path,/] diff --git a/x-pack/docs/en/rest-api/watcher.asciidoc b/docs/reference/rest-api/watcher.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher.asciidoc rename to docs/reference/rest-api/watcher.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/ack-watch.asciidoc b/docs/reference/rest-api/watcher/ack-watch.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/ack-watch.asciidoc rename to docs/reference/rest-api/watcher/ack-watch.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/activate-watch.asciidoc b/docs/reference/rest-api/watcher/activate-watch.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/activate-watch.asciidoc rename to docs/reference/rest-api/watcher/activate-watch.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/deactivate-watch.asciidoc b/docs/reference/rest-api/watcher/deactivate-watch.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/deactivate-watch.asciidoc rename to docs/reference/rest-api/watcher/deactivate-watch.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/delete-watch.asciidoc b/docs/reference/rest-api/watcher/delete-watch.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/delete-watch.asciidoc rename to docs/reference/rest-api/watcher/delete-watch.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc b/docs/reference/rest-api/watcher/execute-watch.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc rename to 
docs/reference/rest-api/watcher/execute-watch.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/get-settings.asciidoc b/docs/reference/rest-api/watcher/get-settings.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/get-settings.asciidoc rename to docs/reference/rest-api/watcher/get-settings.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/get-watch.asciidoc b/docs/reference/rest-api/watcher/get-watch.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/get-watch.asciidoc rename to docs/reference/rest-api/watcher/get-watch.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/put-watch.asciidoc b/docs/reference/rest-api/watcher/put-watch.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/put-watch.asciidoc rename to docs/reference/rest-api/watcher/put-watch.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/query-watches.asciidoc b/docs/reference/rest-api/watcher/query-watches.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/query-watches.asciidoc rename to docs/reference/rest-api/watcher/query-watches.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/start.asciidoc b/docs/reference/rest-api/watcher/start.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/start.asciidoc rename to docs/reference/rest-api/watcher/start.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/stats.asciidoc b/docs/reference/rest-api/watcher/stats.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/stats.asciidoc rename to docs/reference/rest-api/watcher/stats.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/stop.asciidoc b/docs/reference/rest-api/watcher/stop.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/stop.asciidoc rename to docs/reference/rest-api/watcher/stop.asciidoc diff --git a/x-pack/docs/en/rest-api/watcher/update-settings.asciidoc 
b/docs/reference/rest-api/watcher/update-settings.asciidoc similarity index 100% rename from x-pack/docs/en/rest-api/watcher/update-settings.asciidoc rename to docs/reference/rest-api/watcher/update-settings.asciidoc diff --git a/x-pack/docs/en/security/auditing/auditing-search-queries.asciidoc b/docs/reference/security/auditing/auditing-search-queries.asciidoc similarity index 100% rename from x-pack/docs/en/security/auditing/auditing-search-queries.asciidoc rename to docs/reference/security/auditing/auditing-search-queries.asciidoc diff --git a/x-pack/docs/en/security/auditing/enable-audit-logging.asciidoc b/docs/reference/security/auditing/enable-audit-logging.asciidoc similarity index 100% rename from x-pack/docs/en/security/auditing/enable-audit-logging.asciidoc rename to docs/reference/security/auditing/enable-audit-logging.asciidoc diff --git a/x-pack/docs/en/security/auditing/event-types.asciidoc b/docs/reference/security/auditing/event-types.asciidoc similarity index 100% rename from x-pack/docs/en/security/auditing/event-types.asciidoc rename to docs/reference/security/auditing/event-types.asciidoc diff --git a/x-pack/docs/en/security/auditing/ignore-policy.asciidoc b/docs/reference/security/auditing/ignore-policy.asciidoc similarity index 100% rename from x-pack/docs/en/security/auditing/ignore-policy.asciidoc rename to docs/reference/security/auditing/ignore-policy.asciidoc diff --git a/x-pack/docs/en/security/auditing/index.asciidoc b/docs/reference/security/auditing/index.asciidoc similarity index 100% rename from x-pack/docs/en/security/auditing/index.asciidoc rename to docs/reference/security/auditing/index.asciidoc diff --git a/x-pack/docs/en/security/auditing/output-logfile.asciidoc b/docs/reference/security/auditing/output-logfile.asciidoc similarity index 100% rename from x-pack/docs/en/security/auditing/output-logfile.asciidoc rename to docs/reference/security/auditing/output-logfile.asciidoc diff --git 
a/x-pack/docs/en/security/authentication/active-directory-realm.asciidoc b/docs/reference/security/authentication/active-directory-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/active-directory-realm.asciidoc rename to docs/reference/security/authentication/active-directory-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/anonymous-access.asciidoc b/docs/reference/security/authentication/anonymous-access.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/anonymous-access.asciidoc rename to docs/reference/security/authentication/anonymous-access.asciidoc diff --git a/x-pack/docs/en/security/authentication/built-in-users.asciidoc b/docs/reference/security/authentication/built-in-users.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/built-in-users.asciidoc rename to docs/reference/security/authentication/built-in-users.asciidoc diff --git a/x-pack/docs/en/security/authentication/configuring-active-directory-realm.asciidoc b/docs/reference/security/authentication/configuring-active-directory-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/configuring-active-directory-realm.asciidoc rename to docs/reference/security/authentication/configuring-active-directory-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/configuring-file-realm.asciidoc b/docs/reference/security/authentication/configuring-file-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/configuring-file-realm.asciidoc rename to docs/reference/security/authentication/configuring-file-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/configuring-kerberos-realm.asciidoc b/docs/reference/security/authentication/configuring-kerberos-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/configuring-kerberos-realm.asciidoc rename to 
docs/reference/security/authentication/configuring-kerberos-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/configuring-ldap-realm.asciidoc b/docs/reference/security/authentication/configuring-ldap-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/configuring-ldap-realm.asciidoc rename to docs/reference/security/authentication/configuring-ldap-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/configuring-native-realm.asciidoc b/docs/reference/security/authentication/configuring-native-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/configuring-native-realm.asciidoc rename to docs/reference/security/authentication/configuring-native-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/configuring-pki-realm.asciidoc b/docs/reference/security/authentication/configuring-pki-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/configuring-pki-realm.asciidoc rename to docs/reference/security/authentication/configuring-pki-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/custom-realm.asciidoc b/docs/reference/security/authentication/custom-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/custom-realm.asciidoc rename to docs/reference/security/authentication/custom-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/file-realm.asciidoc b/docs/reference/security/authentication/file-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/file-realm.asciidoc rename to docs/reference/security/authentication/file-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/internal-users.asciidoc b/docs/reference/security/authentication/internal-users.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/internal-users.asciidoc rename to 
docs/reference/security/authentication/internal-users.asciidoc diff --git a/x-pack/docs/en/security/authentication/jwt-realm.asciidoc b/docs/reference/security/authentication/jwt-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/jwt-realm.asciidoc rename to docs/reference/security/authentication/jwt-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/kerberos-realm.asciidoc b/docs/reference/security/authentication/kerberos-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/kerberos-realm.asciidoc rename to docs/reference/security/authentication/kerberos-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/ldap-realm.asciidoc b/docs/reference/security/authentication/ldap-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/ldap-realm.asciidoc rename to docs/reference/security/authentication/ldap-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/native-realm.asciidoc b/docs/reference/security/authentication/native-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/native-realm.asciidoc rename to docs/reference/security/authentication/native-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/oidc-guide.asciidoc b/docs/reference/security/authentication/oidc-guide.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/oidc-guide.asciidoc rename to docs/reference/security/authentication/oidc-guide.asciidoc diff --git a/x-pack/docs/en/security/authentication/oidc-realm.asciidoc b/docs/reference/security/authentication/oidc-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/oidc-realm.asciidoc rename to docs/reference/security/authentication/oidc-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/overview.asciidoc b/docs/reference/security/authentication/overview.asciidoc similarity index 
100% rename from x-pack/docs/en/security/authentication/overview.asciidoc rename to docs/reference/security/authentication/overview.asciidoc diff --git a/x-pack/docs/en/security/authentication/pki-realm.asciidoc b/docs/reference/security/authentication/pki-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/pki-realm.asciidoc rename to docs/reference/security/authentication/pki-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/realm-chains.asciidoc b/docs/reference/security/authentication/realm-chains.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/realm-chains.asciidoc rename to docs/reference/security/authentication/realm-chains.asciidoc diff --git a/x-pack/docs/en/security/authentication/realms.asciidoc b/docs/reference/security/authentication/realms.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/realms.asciidoc rename to docs/reference/security/authentication/realms.asciidoc diff --git a/x-pack/docs/en/security/authentication/remote-clusters-privileges-api-key.asciidoc b/docs/reference/security/authentication/remote-clusters-privileges-api-key.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/remote-clusters-privileges-api-key.asciidoc rename to docs/reference/security/authentication/remote-clusters-privileges-api-key.asciidoc diff --git a/x-pack/docs/en/security/authentication/remote-clusters-privileges-cert.asciidoc b/docs/reference/security/authentication/remote-clusters-privileges-cert.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/remote-clusters-privileges-cert.asciidoc rename to docs/reference/security/authentication/remote-clusters-privileges-cert.asciidoc diff --git a/x-pack/docs/en/security/authentication/saml-guide.asciidoc b/docs/reference/security/authentication/saml-guide.asciidoc similarity index 100% rename from 
x-pack/docs/en/security/authentication/saml-guide.asciidoc rename to docs/reference/security/authentication/saml-guide.asciidoc diff --git a/x-pack/docs/en/security/authentication/saml-realm.asciidoc b/docs/reference/security/authentication/saml-realm.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/saml-realm.asciidoc rename to docs/reference/security/authentication/saml-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/security-domain.asciidoc b/docs/reference/security/authentication/security-domain.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/security-domain.asciidoc rename to docs/reference/security/authentication/security-domain.asciidoc diff --git a/x-pack/docs/en/security/authentication/service-accounts.asciidoc b/docs/reference/security/authentication/service-accounts.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/service-accounts.asciidoc rename to docs/reference/security/authentication/service-accounts.asciidoc diff --git a/x-pack/docs/en/security/authentication/token-authentication-services.asciidoc b/docs/reference/security/authentication/token-authentication-services.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/token-authentication-services.asciidoc rename to docs/reference/security/authentication/token-authentication-services.asciidoc diff --git a/x-pack/docs/en/security/authentication/user-cache.asciidoc b/docs/reference/security/authentication/user-cache.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/user-cache.asciidoc rename to docs/reference/security/authentication/user-cache.asciidoc diff --git a/x-pack/docs/en/security/authentication/user-lookup.asciidoc b/docs/reference/security/authentication/user-lookup.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/user-lookup.asciidoc rename to 
docs/reference/security/authentication/user-lookup.asciidoc diff --git a/x-pack/docs/en/security/authentication/user-profile.asciidoc b/docs/reference/security/authentication/user-profile.asciidoc similarity index 100% rename from x-pack/docs/en/security/authentication/user-profile.asciidoc rename to docs/reference/security/authentication/user-profile.asciidoc diff --git a/x-pack/docs/en/security/authorization/alias-privileges.asciidoc b/docs/reference/security/authorization/alias-privileges.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/alias-privileges.asciidoc rename to docs/reference/security/authorization/alias-privileges.asciidoc diff --git a/x-pack/docs/en/security/authorization/built-in-roles.asciidoc b/docs/reference/security/authorization/built-in-roles.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/built-in-roles.asciidoc rename to docs/reference/security/authorization/built-in-roles.asciidoc diff --git a/x-pack/docs/en/security/authorization/configuring-authorization-delegation.asciidoc b/docs/reference/security/authorization/configuring-authorization-delegation.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/configuring-authorization-delegation.asciidoc rename to docs/reference/security/authorization/configuring-authorization-delegation.asciidoc diff --git a/x-pack/docs/en/security/authorization/custom-authorization.asciidoc b/docs/reference/security/authorization/custom-authorization.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/custom-authorization.asciidoc rename to docs/reference/security/authorization/custom-authorization.asciidoc diff --git a/x-pack/docs/en/security/authorization/document-level-security.asciidoc b/docs/reference/security/authorization/document-level-security.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/document-level-security.asciidoc rename to 
docs/reference/security/authorization/document-level-security.asciidoc diff --git a/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc b/docs/reference/security/authorization/field-and-document-access-control.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc rename to docs/reference/security/authorization/field-and-document-access-control.asciidoc diff --git a/x-pack/docs/en/security/authorization/field-level-security.asciidoc b/docs/reference/security/authorization/field-level-security.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/field-level-security.asciidoc rename to docs/reference/security/authorization/field-level-security.asciidoc diff --git a/x-pack/docs/en/security/authorization/images/authorization.png b/docs/reference/security/authorization/images/authorization.png similarity index 100% rename from x-pack/docs/en/security/authorization/images/authorization.png rename to docs/reference/security/authorization/images/authorization.png diff --git a/x-pack/docs/en/security/authorization/managing-roles.asciidoc b/docs/reference/security/authorization/managing-roles.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/managing-roles.asciidoc rename to docs/reference/security/authorization/managing-roles.asciidoc diff --git a/x-pack/docs/en/security/authorization/mapping-roles.asciidoc b/docs/reference/security/authorization/mapping-roles.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/mapping-roles.asciidoc rename to docs/reference/security/authorization/mapping-roles.asciidoc diff --git a/x-pack/docs/en/security/authorization/overview.asciidoc b/docs/reference/security/authorization/overview.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/overview.asciidoc rename to docs/reference/security/authorization/overview.asciidoc diff --git 
a/x-pack/docs/en/security/authorization/privileges.asciidoc b/docs/reference/security/authorization/privileges.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/privileges.asciidoc rename to docs/reference/security/authorization/privileges.asciidoc diff --git a/x-pack/docs/en/security/authorization/role-restriction.asciidoc b/docs/reference/security/authorization/role-restriction.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/role-restriction.asciidoc rename to docs/reference/security/authorization/role-restriction.asciidoc diff --git a/x-pack/docs/en/security/authorization/role-templates.asciidoc b/docs/reference/security/authorization/role-templates.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/role-templates.asciidoc rename to docs/reference/security/authorization/role-templates.asciidoc diff --git a/x-pack/docs/en/security/authorization/run-as-privilege.asciidoc b/docs/reference/security/authorization/run-as-privilege.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/run-as-privilege.asciidoc rename to docs/reference/security/authorization/run-as-privilege.asciidoc diff --git a/x-pack/docs/en/security/authorization/set-security-user.asciidoc b/docs/reference/security/authorization/set-security-user.asciidoc similarity index 100% rename from x-pack/docs/en/security/authorization/set-security-user.asciidoc rename to docs/reference/security/authorization/set-security-user.asciidoc diff --git a/x-pack/docs/en/security/ccs-clients-integrations/hadoop.asciidoc b/docs/reference/security/ccs-clients-integrations/hadoop.asciidoc similarity index 100% rename from x-pack/docs/en/security/ccs-clients-integrations/hadoop.asciidoc rename to docs/reference/security/ccs-clients-integrations/hadoop.asciidoc diff --git a/x-pack/docs/en/security/ccs-clients-integrations/http.asciidoc b/docs/reference/security/ccs-clients-integrations/http.asciidoc 
similarity index 100% rename from x-pack/docs/en/security/ccs-clients-integrations/http.asciidoc rename to docs/reference/security/ccs-clients-integrations/http.asciidoc diff --git a/x-pack/docs/en/security/ccs-clients-integrations/index.asciidoc b/docs/reference/security/ccs-clients-integrations/index.asciidoc similarity index 100% rename from x-pack/docs/en/security/ccs-clients-integrations/index.asciidoc rename to docs/reference/security/ccs-clients-integrations/index.asciidoc diff --git a/x-pack/docs/en/security/ccs-clients-integrations/monitoring.asciidoc b/docs/reference/security/ccs-clients-integrations/monitoring.asciidoc similarity index 100% rename from x-pack/docs/en/security/ccs-clients-integrations/monitoring.asciidoc rename to docs/reference/security/ccs-clients-integrations/monitoring.asciidoc diff --git a/x-pack/docs/en/security/configuring-stack-security.asciidoc b/docs/reference/security/configuring-stack-security.asciidoc similarity index 100% rename from x-pack/docs/en/security/configuring-stack-security.asciidoc rename to docs/reference/security/configuring-stack-security.asciidoc diff --git a/x-pack/docs/en/security/enroll-nodes.asciidoc b/docs/reference/security/enroll-nodes.asciidoc similarity index 100% rename from x-pack/docs/en/security/enroll-nodes.asciidoc rename to docs/reference/security/enroll-nodes.asciidoc diff --git a/x-pack/docs/en/security/es-security-principles.asciidoc b/docs/reference/security/es-security-principles.asciidoc similarity index 100% rename from x-pack/docs/en/security/es-security-principles.asciidoc rename to docs/reference/security/es-security-principles.asciidoc diff --git a/x-pack/docs/en/security/fips-140-compliance.asciidoc b/docs/reference/security/fips-140-compliance.asciidoc similarity index 100% rename from x-pack/docs/en/security/fips-140-compliance.asciidoc rename to docs/reference/security/fips-140-compliance.asciidoc diff --git a/x-pack/docs/en/security/fips-java17.asciidoc 
b/docs/reference/security/fips-java17.asciidoc similarity index 100% rename from x-pack/docs/en/security/fips-java17.asciidoc rename to docs/reference/security/fips-java17.asciidoc diff --git a/x-pack/docs/en/security/images/assign-role.jpg b/docs/reference/security/images/assign-role.jpg similarity index 100% rename from x-pack/docs/en/security/images/assign-role.jpg rename to docs/reference/security/images/assign-role.jpg diff --git a/x-pack/docs/en/security/images/create-logstash-user.jpg b/docs/reference/security/images/create-logstash-user.jpg similarity index 100% rename from x-pack/docs/en/security/images/create-logstash-user.jpg rename to docs/reference/security/images/create-logstash-user.jpg diff --git a/x-pack/docs/en/security/images/create-reader-role.jpg b/docs/reference/security/images/create-reader-role.jpg similarity index 100% rename from x-pack/docs/en/security/images/create-reader-role.jpg rename to docs/reference/security/images/create-reader-role.jpg diff --git a/x-pack/docs/en/security/images/create-user.jpg b/docs/reference/security/images/create-user.jpg similarity index 100% rename from x-pack/docs/en/security/images/create-user.jpg rename to docs/reference/security/images/create-user.jpg diff --git a/x-pack/docs/en/security/images/create-writer-role.jpg b/docs/reference/security/images/create-writer-role.jpg similarity index 100% rename from x-pack/docs/en/security/images/create-writer-role.jpg rename to docs/reference/security/images/create-writer-role.jpg diff --git a/x-pack/docs/en/security/images/elastic-security-overview.png b/docs/reference/security/images/elastic-security-overview.png similarity index 100% rename from x-pack/docs/en/security/images/elastic-security-overview.png rename to docs/reference/security/images/elastic-security-overview.png diff --git a/x-pack/docs/en/security/images/kibana-login.jpg b/docs/reference/security/images/kibana-login.jpg similarity index 100% rename from 
x-pack/docs/en/security/images/kibana-login.jpg rename to docs/reference/security/images/kibana-login.jpg diff --git a/x-pack/docs/en/security/images/management-builtin-users.jpg b/docs/reference/security/images/management-builtin-users.jpg similarity index 100% rename from x-pack/docs/en/security/images/management-builtin-users.jpg rename to docs/reference/security/images/management-builtin-users.jpg diff --git a/x-pack/docs/en/security/images/management-roles.jpg b/docs/reference/security/images/management-roles.jpg similarity index 100% rename from x-pack/docs/en/security/images/management-roles.jpg rename to docs/reference/security/images/management-roles.jpg diff --git a/x-pack/docs/en/security/images/management-users.jpg b/docs/reference/security/images/management-users.jpg similarity index 100% rename from x-pack/docs/en/security/images/management-users.jpg rename to docs/reference/security/images/management-users.jpg diff --git a/x-pack/docs/en/security/images/nexus.png b/docs/reference/security/images/nexus.png similarity index 100% rename from x-pack/docs/en/security/images/nexus.png rename to docs/reference/security/images/nexus.png diff --git a/x-pack/docs/en/security/index.asciidoc b/docs/reference/security/index.asciidoc similarity index 100% rename from x-pack/docs/en/security/index.asciidoc rename to docs/reference/security/index.asciidoc diff --git a/x-pack/docs/en/security/limitations.asciidoc b/docs/reference/security/limitations.asciidoc similarity index 100% rename from x-pack/docs/en/security/limitations.asciidoc rename to docs/reference/security/limitations.asciidoc diff --git a/x-pack/docs/en/security/operator-privileges/configure-operator-privileges.asciidoc b/docs/reference/security/operator-privileges/configure-operator-privileges.asciidoc similarity index 100% rename from x-pack/docs/en/security/operator-privileges/configure-operator-privileges.asciidoc rename to 
docs/reference/security/operator-privileges/configure-operator-privileges.asciidoc diff --git a/x-pack/docs/en/security/operator-privileges/index.asciidoc b/docs/reference/security/operator-privileges/index.asciidoc similarity index 100% rename from x-pack/docs/en/security/operator-privileges/index.asciidoc rename to docs/reference/security/operator-privileges/index.asciidoc diff --git a/x-pack/docs/en/security/operator-privileges/operator-only-functionality.asciidoc b/docs/reference/security/operator-privileges/operator-only-functionality.asciidoc similarity index 100% rename from x-pack/docs/en/security/operator-privileges/operator-only-functionality.asciidoc rename to docs/reference/security/operator-privileges/operator-only-functionality.asciidoc diff --git a/x-pack/docs/en/security/operator-privileges/operator-only-snapshot-and-restore.asciidoc b/docs/reference/security/operator-privileges/operator-only-snapshot-and-restore.asciidoc similarity index 100% rename from x-pack/docs/en/security/operator-privileges/operator-only-snapshot-and-restore.asciidoc rename to docs/reference/security/operator-privileges/operator-only-snapshot-and-restore.asciidoc diff --git a/x-pack/docs/en/security/reference/files.asciidoc b/docs/reference/security/reference/files.asciidoc similarity index 100% rename from x-pack/docs/en/security/reference/files.asciidoc rename to docs/reference/security/reference/files.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/change-passwords-native-users.asciidoc b/docs/reference/security/securing-communications/change-passwords-native-users.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/change-passwords-native-users.asciidoc rename to docs/reference/security/securing-communications/change-passwords-native-users.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/enabling-cipher-suites.asciidoc 
b/docs/reference/security/securing-communications/enabling-cipher-suites.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/enabling-cipher-suites.asciidoc rename to docs/reference/security/securing-communications/enabling-cipher-suites.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/security-basic-setup-https.asciidoc b/docs/reference/security/securing-communications/security-basic-setup-https.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/security-basic-setup-https.asciidoc rename to docs/reference/security/securing-communications/security-basic-setup-https.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/security-basic-setup.asciidoc b/docs/reference/security/securing-communications/security-basic-setup.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/security-basic-setup.asciidoc rename to docs/reference/security/securing-communications/security-basic-setup.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/security-minimal-setup.asciidoc b/docs/reference/security/securing-communications/security-minimal-setup.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/security-minimal-setup.asciidoc rename to docs/reference/security/securing-communications/security-minimal-setup.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/tls-ad.asciidoc b/docs/reference/security/securing-communications/tls-ad.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/tls-ad.asciidoc rename to docs/reference/security/securing-communications/tls-ad.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/tls-http.asciidoc b/docs/reference/security/securing-communications/tls-http.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/tls-http.asciidoc rename 
to docs/reference/security/securing-communications/tls-http.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc b/docs/reference/security/securing-communications/tls-ldap.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc rename to docs/reference/security/securing-communications/tls-ldap.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/tls-versions-jdk.asciidoc b/docs/reference/security/securing-communications/tls-versions-jdk.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/tls-versions-jdk.asciidoc rename to docs/reference/security/securing-communications/tls-versions-jdk.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/update-tls-certificates.asciidoc b/docs/reference/security/securing-communications/update-tls-certificates.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/update-tls-certificates.asciidoc rename to docs/reference/security/securing-communications/update-tls-certificates.asciidoc diff --git a/x-pack/docs/en/security/security-manual-configuration.asciidoc b/docs/reference/security/security-manual-configuration.asciidoc similarity index 100% rename from x-pack/docs/en/security/security-manual-configuration.asciidoc rename to docs/reference/security/security-manual-configuration.asciidoc diff --git a/x-pack/docs/en/security/troubleshooting.asciidoc b/docs/reference/security/troubleshooting.asciidoc similarity index 100% rename from x-pack/docs/en/security/troubleshooting.asciidoc rename to docs/reference/security/troubleshooting.asciidoc diff --git a/x-pack/docs/en/security/using-ip-filtering.asciidoc b/docs/reference/security/using-ip-filtering.asciidoc similarity index 100% rename from x-pack/docs/en/security/using-ip-filtering.asciidoc rename to docs/reference/security/using-ip-filtering.asciidoc diff --git 
a/docs/reference/setup/add-nodes.asciidoc b/docs/reference/setup/add-nodes.asciidoc index 0be2d8c643a9f..e65dd0ba7af1f 100644 --- a/docs/reference/setup/add-nodes.asciidoc +++ b/docs/reference/setup/add-nodes.asciidoc @@ -37,7 +37,7 @@ To add a node to a cluster running on multiple machines, you must also set the rest of its cluster. ==== -include::../../../x-pack/docs/en/security/enroll-nodes.asciidoc[] +include::{es-repo-dir}/security/enroll-nodes.asciidoc[] For more information about discovery and shard allocation, refer to <> and <>. diff --git a/docs/reference/setup/install/targz-start.asciidoc b/docs/reference/setup/install/targz-start.asciidoc index 294f0e1541fbc..79c4131a57030 100644 --- a/docs/reference/setup/install/targz-start.asciidoc +++ b/docs/reference/setup/install/targz-start.asciidoc @@ -53,4 +53,4 @@ symbolic link. :slash: / -include::../../../../x-pack/docs/en/security/enroll-nodes.asciidoc[] \ No newline at end of file +include::{es-repo-dir}/security/enroll-nodes.asciidoc[] \ No newline at end of file diff --git a/docs/reference/setup/install/zip-windows-start.asciidoc b/docs/reference/setup/install/zip-windows-start.asciidoc index 29356d398c808..60edbb9ec704c 100644 --- a/docs/reference/setup/install/zip-windows-start.asciidoc +++ b/docs/reference/setup/install/zip-windows-start.asciidoc @@ -47,4 +47,4 @@ To stop {es}, press `Ctrl-C`. :slash: \ -include::../../../../x-pack/docs/en/security/enroll-nodes.asciidoc[] \ No newline at end of file +include::{es-repo-dir}/security/enroll-nodes.asciidoc[] \ No newline at end of file diff --git a/docs/reference/sql/security.asciidoc b/docs/reference/sql/security.asciidoc index 7847787025910..b57179742700c 100644 --- a/docs/reference/sql/security.asciidoc +++ b/docs/reference/sql/security.asciidoc @@ -44,7 +44,7 @@ APIs to view or edit a role defined in `roles.yml`. 
This example configures a role that can run SQL in JDBC querying the `test` index: -include::{xes-repo-dir}/rest-api/security/create-roles.asciidoc[tag=sql-queries-permission] +include::{es-repo-dir}/rest-api/security/create-roles.asciidoc[tag=sql-queries-permission] [discrete] [[sql-role-file-example]] diff --git a/docs/reference/transform/apis/update-transform.asciidoc b/docs/reference/transform/apis/update-transform.asciidoc index 8768aaaff1530..57a27b4efae76 100644 --- a/docs/reference/transform/apis/update-transform.asciidoc +++ b/docs/reference/transform/apis/update-transform.asciidoc @@ -341,4 +341,4 @@ When the {transform} is updated, you receive the updated configuration: ---- // TESTRESPONSE[s/"version" : "8.4.0"/"version" : $body.version/] // TESTRESPONSE[s/"create_time" : 1656113450613/"create_time" : $body.create_time/] -// TESTRESPONSE[s/"authorization" : \{[^}]*\},//] +// TESTRESPONSE[s/"superuser"/"_es_test_root"/] diff --git a/docs/reference/troubleshooting.asciidoc b/docs/reference/troubleshooting.asciidoc index edd73ba393c38..e5ad75e048c1b 100644 --- a/docs/reference/troubleshooting.asciidoc +++ b/docs/reference/troubleshooting.asciidoc @@ -125,7 +125,7 @@ include::monitoring/troubleshooting.asciidoc[] include::transform/troubleshooting.asciidoc[leveloffset=+1] -include::../../x-pack/docs/en/watcher/troubleshooting.asciidoc[] +include::watcher/troubleshooting.asciidoc[] include::troubleshooting/troubleshooting-searches.asciidoc[] diff --git a/docs/reference/upgrade.asciidoc b/docs/reference/upgrade.asciidoc index ecd3b2de72a32..8f6d095971ff1 100644 --- a/docs/reference/upgrade.asciidoc +++ b/docs/reference/upgrade.asciidoc @@ -52,7 +52,7 @@ the REST API. 
[[upgrade-fips-java17]] === FIPS Compliance and Java 17 -include::{xes-repo-dir}/security/fips-java17.asciidoc[] +include::{es-repo-dir}/security/fips-java17.asciidoc[] include::upgrade/archived-settings.asciidoc[] diff --git a/x-pack/docs/en/watcher/actions.asciidoc b/docs/reference/watcher/actions.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/actions.asciidoc rename to docs/reference/watcher/actions.asciidoc diff --git a/x-pack/docs/en/watcher/actions/email.asciidoc b/docs/reference/watcher/actions/email.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/actions/email.asciidoc rename to docs/reference/watcher/actions/email.asciidoc diff --git a/x-pack/docs/en/watcher/actions/index.asciidoc b/docs/reference/watcher/actions/index.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/actions/index.asciidoc rename to docs/reference/watcher/actions/index.asciidoc diff --git a/x-pack/docs/en/watcher/actions/jira.asciidoc b/docs/reference/watcher/actions/jira.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/actions/jira.asciidoc rename to docs/reference/watcher/actions/jira.asciidoc diff --git a/x-pack/docs/en/watcher/actions/logging.asciidoc b/docs/reference/watcher/actions/logging.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/actions/logging.asciidoc rename to docs/reference/watcher/actions/logging.asciidoc diff --git a/x-pack/docs/en/watcher/actions/pagerduty.asciidoc b/docs/reference/watcher/actions/pagerduty.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/actions/pagerduty.asciidoc rename to docs/reference/watcher/actions/pagerduty.asciidoc diff --git a/x-pack/docs/en/watcher/actions/slack.asciidoc b/docs/reference/watcher/actions/slack.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/actions/slack.asciidoc rename to docs/reference/watcher/actions/slack.asciidoc diff --git a/x-pack/docs/en/watcher/actions/webhook.asciidoc 
b/docs/reference/watcher/actions/webhook.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/actions/webhook.asciidoc rename to docs/reference/watcher/actions/webhook.asciidoc diff --git a/x-pack/docs/en/watcher/condition.asciidoc b/docs/reference/watcher/condition.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/condition.asciidoc rename to docs/reference/watcher/condition.asciidoc diff --git a/x-pack/docs/en/watcher/condition/always.asciidoc b/docs/reference/watcher/condition/always.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/condition/always.asciidoc rename to docs/reference/watcher/condition/always.asciidoc diff --git a/x-pack/docs/en/watcher/condition/array-compare.asciidoc b/docs/reference/watcher/condition/array-compare.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/condition/array-compare.asciidoc rename to docs/reference/watcher/condition/array-compare.asciidoc diff --git a/x-pack/docs/en/watcher/condition/compare.asciidoc b/docs/reference/watcher/condition/compare.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/condition/compare.asciidoc rename to docs/reference/watcher/condition/compare.asciidoc diff --git a/x-pack/docs/en/watcher/condition/never.asciidoc b/docs/reference/watcher/condition/never.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/condition/never.asciidoc rename to docs/reference/watcher/condition/never.asciidoc diff --git a/x-pack/docs/en/watcher/condition/script.asciidoc b/docs/reference/watcher/condition/script.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/condition/script.asciidoc rename to docs/reference/watcher/condition/script.asciidoc diff --git a/x-pack/docs/en/watcher/customizing-watches.asciidoc b/docs/reference/watcher/customizing-watches.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/customizing-watches.asciidoc rename to docs/reference/watcher/customizing-watches.asciidoc diff --git 
a/x-pack/docs/en/watcher/encrypting-data.asciidoc b/docs/reference/watcher/encrypting-data.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/encrypting-data.asciidoc rename to docs/reference/watcher/encrypting-data.asciidoc diff --git a/x-pack/docs/en/watcher/example-watches.asciidoc b/docs/reference/watcher/example-watches.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/example-watches.asciidoc rename to docs/reference/watcher/example-watches.asciidoc diff --git a/x-pack/docs/en/watcher/example-watches/example-watch-clusterstatus.asciidoc b/docs/reference/watcher/example-watches/example-watch-clusterstatus.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/example-watches/example-watch-clusterstatus.asciidoc rename to docs/reference/watcher/example-watches/example-watch-clusterstatus.asciidoc diff --git a/x-pack/docs/en/watcher/example-watches/watching-time-series-data.asciidoc b/docs/reference/watcher/example-watches/watching-time-series-data.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/example-watches/watching-time-series-data.asciidoc rename to docs/reference/watcher/example-watches/watching-time-series-data.asciidoc diff --git a/x-pack/docs/en/watcher/getting-started.asciidoc b/docs/reference/watcher/getting-started.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/getting-started.asciidoc rename to docs/reference/watcher/getting-started.asciidoc diff --git a/x-pack/docs/en/watcher/how-watcher-works.asciidoc b/docs/reference/watcher/how-watcher-works.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/how-watcher-works.asciidoc rename to docs/reference/watcher/how-watcher-works.asciidoc diff --git a/x-pack/docs/en/watcher/images/action-throttling.jpg b/docs/reference/watcher/images/action-throttling.jpg similarity index 100% rename from x-pack/docs/en/watcher/images/action-throttling.jpg rename to docs/reference/watcher/images/action-throttling.jpg diff --git 
a/x-pack/docs/en/watcher/images/slack-add-webhook-integration.jpg b/docs/reference/watcher/images/slack-add-webhook-integration.jpg similarity index 100% rename from x-pack/docs/en/watcher/images/slack-add-webhook-integration.jpg rename to docs/reference/watcher/images/slack-add-webhook-integration.jpg diff --git a/x-pack/docs/en/watcher/images/slack-copy-webhook-url.jpg b/docs/reference/watcher/images/slack-copy-webhook-url.jpg similarity index 100% rename from x-pack/docs/en/watcher/images/slack-copy-webhook-url.jpg rename to docs/reference/watcher/images/slack-copy-webhook-url.jpg diff --git a/x-pack/docs/en/watcher/images/watch-execution.jpg b/docs/reference/watcher/images/watch-execution.jpg similarity index 100% rename from x-pack/docs/en/watcher/images/watch-execution.jpg rename to docs/reference/watcher/images/watch-execution.jpg diff --git a/x-pack/docs/en/watcher/images/watcher-kibana-dashboard.png b/docs/reference/watcher/images/watcher-kibana-dashboard.png similarity index 100% rename from x-pack/docs/en/watcher/images/watcher-kibana-dashboard.png rename to docs/reference/watcher/images/watcher-kibana-dashboard.png diff --git a/x-pack/docs/en/watcher/images/watcher.graffle b/docs/reference/watcher/images/watcher.graffle similarity index 100% rename from x-pack/docs/en/watcher/images/watcher.graffle rename to docs/reference/watcher/images/watcher.graffle diff --git a/x-pack/docs/en/watcher/index.asciidoc b/docs/reference/watcher/index.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/index.asciidoc rename to docs/reference/watcher/index.asciidoc diff --git a/x-pack/docs/en/watcher/input.asciidoc b/docs/reference/watcher/input.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/input.asciidoc rename to docs/reference/watcher/input.asciidoc diff --git a/x-pack/docs/en/watcher/input/chain.asciidoc b/docs/reference/watcher/input/chain.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/input/chain.asciidoc rename 
to docs/reference/watcher/input/chain.asciidoc diff --git a/x-pack/docs/en/watcher/input/http.asciidoc b/docs/reference/watcher/input/http.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/input/http.asciidoc rename to docs/reference/watcher/input/http.asciidoc diff --git a/x-pack/docs/en/watcher/input/search.asciidoc b/docs/reference/watcher/input/search.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/input/search.asciidoc rename to docs/reference/watcher/input/search.asciidoc diff --git a/x-pack/docs/en/watcher/input/simple.asciidoc b/docs/reference/watcher/input/simple.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/input/simple.asciidoc rename to docs/reference/watcher/input/simple.asciidoc diff --git a/x-pack/docs/en/watcher/java/ack-watch.asciidoc b/docs/reference/watcher/java/ack-watch.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/java/ack-watch.asciidoc rename to docs/reference/watcher/java/ack-watch.asciidoc diff --git a/x-pack/docs/en/watcher/java/activate-watch.asciidoc b/docs/reference/watcher/java/activate-watch.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/java/activate-watch.asciidoc rename to docs/reference/watcher/java/activate-watch.asciidoc diff --git a/x-pack/docs/en/watcher/java/deactivate-watch.asciidoc b/docs/reference/watcher/java/deactivate-watch.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/java/deactivate-watch.asciidoc rename to docs/reference/watcher/java/deactivate-watch.asciidoc diff --git a/x-pack/docs/en/watcher/java/delete-watch.asciidoc b/docs/reference/watcher/java/delete-watch.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/java/delete-watch.asciidoc rename to docs/reference/watcher/java/delete-watch.asciidoc diff --git a/x-pack/docs/en/watcher/java/execute-watch.asciidoc b/docs/reference/watcher/java/execute-watch.asciidoc similarity index 100% rename from 
x-pack/docs/en/watcher/java/execute-watch.asciidoc rename to docs/reference/watcher/java/execute-watch.asciidoc diff --git a/x-pack/docs/en/watcher/java/get-watch.asciidoc b/docs/reference/watcher/java/get-watch.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/java/get-watch.asciidoc rename to docs/reference/watcher/java/get-watch.asciidoc diff --git a/x-pack/docs/en/watcher/java/put-watch.asciidoc b/docs/reference/watcher/java/put-watch.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/java/put-watch.asciidoc rename to docs/reference/watcher/java/put-watch.asciidoc diff --git a/x-pack/docs/en/watcher/java/service.asciidoc b/docs/reference/watcher/java/service.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/java/service.asciidoc rename to docs/reference/watcher/java/service.asciidoc diff --git a/x-pack/docs/en/watcher/java/stats.asciidoc b/docs/reference/watcher/java/stats.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/java/stats.asciidoc rename to docs/reference/watcher/java/stats.asciidoc diff --git a/x-pack/docs/en/watcher/limitations.asciidoc b/docs/reference/watcher/limitations.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/limitations.asciidoc rename to docs/reference/watcher/limitations.asciidoc diff --git a/x-pack/docs/en/watcher/managing-watches.asciidoc b/docs/reference/watcher/managing-watches.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/managing-watches.asciidoc rename to docs/reference/watcher/managing-watches.asciidoc diff --git a/x-pack/docs/en/watcher/transform.asciidoc b/docs/reference/watcher/transform.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/transform.asciidoc rename to docs/reference/watcher/transform.asciidoc diff --git a/x-pack/docs/en/watcher/transform/chain.asciidoc b/docs/reference/watcher/transform/chain.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/transform/chain.asciidoc rename to 
docs/reference/watcher/transform/chain.asciidoc diff --git a/x-pack/docs/en/watcher/transform/script.asciidoc b/docs/reference/watcher/transform/script.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/transform/script.asciidoc rename to docs/reference/watcher/transform/script.asciidoc diff --git a/x-pack/docs/en/watcher/transform/search.asciidoc b/docs/reference/watcher/transform/search.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/transform/search.asciidoc rename to docs/reference/watcher/transform/search.asciidoc diff --git a/x-pack/docs/en/watcher/trigger.asciidoc b/docs/reference/watcher/trigger.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/trigger.asciidoc rename to docs/reference/watcher/trigger.asciidoc diff --git a/x-pack/docs/en/watcher/trigger/schedule.asciidoc b/docs/reference/watcher/trigger/schedule.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/trigger/schedule.asciidoc rename to docs/reference/watcher/trigger/schedule.asciidoc diff --git a/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc b/docs/reference/watcher/trigger/schedule/cron.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc rename to docs/reference/watcher/trigger/schedule/cron.asciidoc diff --git a/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc b/docs/reference/watcher/trigger/schedule/daily.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc rename to docs/reference/watcher/trigger/schedule/daily.asciidoc diff --git a/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc b/docs/reference/watcher/trigger/schedule/hourly.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc rename to docs/reference/watcher/trigger/schedule/hourly.asciidoc diff --git a/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc b/docs/reference/watcher/trigger/schedule/interval.asciidoc 
similarity index 100% rename from x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc rename to docs/reference/watcher/trigger/schedule/interval.asciidoc diff --git a/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc b/docs/reference/watcher/trigger/schedule/monthly.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc rename to docs/reference/watcher/trigger/schedule/monthly.asciidoc diff --git a/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc b/docs/reference/watcher/trigger/schedule/weekly.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc rename to docs/reference/watcher/trigger/schedule/weekly.asciidoc diff --git a/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc b/docs/reference/watcher/trigger/schedule/yearly.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc rename to docs/reference/watcher/trigger/schedule/yearly.asciidoc diff --git a/x-pack/docs/en/watcher/troubleshooting.asciidoc b/docs/reference/watcher/troubleshooting.asciidoc similarity index 100% rename from x-pack/docs/en/watcher/troubleshooting.asciidoc rename to docs/reference/watcher/troubleshooting.asciidoc diff --git a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 3754ca0339989..36e3a2cb5e2a9 100644 --- a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -19,8 +19,11 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import 
org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Strings; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ClientYamlDocsTestClient; @@ -46,10 +49,12 @@ import java.util.List; import java.util.Map; +import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.hamcrest.Matchers.is; //The default 20 minutes timeout isn't always enough, but Darwin CI hosts are incredibly slow... @TimeoutSuite(millis = 40 * TimeUnits.MINUTE) @@ -222,6 +227,74 @@ protected boolean isXpackInfoTest() { return testName != null && (testName.contains("/info/") || testName.contains("\\info\\")); } + private static final String USER_TOKEN = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray())); + + /** + * All tests run as a an administrative user but use es-shield-runas-user to become a less privileged user. + */ + @Override + protected Settings restClientSettings() { + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", USER_TOKEN).build(); + } + + /** + * Deletes users after every test just in case any test adds any. + */ + @After + public void deleteUsers() throws Exception { + ClientYamlTestResponse response = getAdminExecutionContext().callApi("security.get_user", emptyMap(), emptyList(), emptyMap()); + @SuppressWarnings("unchecked") + Map users = (Map) response.getBody(); + for (String user : users.keySet()) { + Map metadataMap = (Map) ((Map) users.get(user)).get("metadata"); + Boolean reserved = metadataMap == null ? 
null : (Boolean) metadataMap.get("_reserved"); + if (reserved == null || reserved == false) { + logger.warn("Deleting leftover user {}", user); + getAdminExecutionContext().callApi("security.delete_user", singletonMap("username", user), emptyList(), emptyMap()); + } + } + } + + /** + * Re-enables watcher after every test just in case any test disables it. + */ + @After + public void reenableWatcher() throws Exception { + if (isWatcherTest()) { + assertBusy(() -> { + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); + String state = (String) response.evaluate("stats.0.watcher_state"); + + switch (state) { + case "stopped": + ClientYamlTestResponse startResponse = getAdminExecutionContext().callApi( + "watcher.start", + emptyMap(), + emptyList(), + emptyMap() + ); + boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged"); + assertThat(isAcknowledged, is(true)); + throw new AssertionError("waiting until stopped state reached started state"); + case "stopping": + throw new AssertionError("waiting until stopping state reached stopped state to start again"); + case "starting": + throw new AssertionError("waiting until starting state reached started state"); + case "started": + // all good here, we are done + break; + default: + throw new AssertionError("unknown state[" + state + "]"); + } + }); + } + } + + protected boolean isWatcherTest() { + String testName = getTestName(); + return testName != null && (testName.contains("watcher/") || testName.contains("watcher\\")); + } + /** * Compares the results of running two analyzers against many random * strings. 
The goal is to figure out if two anlayzers are "the same" by diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 7ae1ec73d32c2..f3624c2b41894 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -5,16 +5,16 @@ apply plugin: 'elasticsearch.rest-resources' * `// CONSOLE` and `// TESTRESPONSE` but have yet to be converted. Try and * only remove entries from this list. When it is empty we'll remove it * entirely and have a party! There will be cake and everything.... */ -tasks.named("buildRestTests").configure { - expectedUnconvertedCandidates = [ - 'en/rest-api/watcher/put-watch.asciidoc', - 'en/security/authentication/user-cache.asciidoc', - 'en/security/authorization/run-as-privilege.asciidoc', - 'en/security/ccs-clients-integrations/http.asciidoc', - 'en/rest-api/watcher/stats.asciidoc', - 'en/watcher/example-watches/watching-time-series-data.asciidoc', - ] -} +// tasks.named("buildRestTests").configure { +// expectedUnconvertedCandidates = [ +// 'en/rest-api/watcher/put-watch.asciidoc', +// 'en/security/authentication/user-cache.asciidoc', +// 'en/security/authorization/run-as-privilege.asciidoc', +// 'en/security/ccs-clients-integrations/http.asciidoc', +// 'en/rest-api/watcher/stats.asciidoc', +// 'en/watcher/example-watches/watching-time-series-data.asciidoc', +// ] +// } dependencies { yamlRestTestImplementation(testArtifact(project(xpackModule('core')))) From daddca75d1888d8b2ba526bf19c7e3dffb9b5458 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 12 Sep 2023 15:53:55 -0400 Subject: [PATCH 024/114] [DOCS] Fix Usage API snippet test (#99497) --- docs/reference/rest-api/usage.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 9594669b6d41f..99e432eb07e1c 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -38,7 +38,7 @@ 
include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] ------------------------------------------------------------ GET /_xpack/usage ------------------------------------------------------------ -// TEST[s/usage/usage?filter_path=-watcher.execution.actions.index*\,-watcher.execution.actions.logging*/] +// TEST[s/usage/usage?filter_path=-watcher.execution.actions.index*\,-watcher.execution.actions.logging*,-watcher.execution.actions.email*/] // This response filter removes watcher logging results if they are included // to avoid errors in the CI builds. From 7f92a3df82dd39619112d09c6a0262c6e79ec9f2 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 12 Sep 2023 15:54:43 -0400 Subject: [PATCH 025/114] [ESQL] Test that we don't return NaNs or Infinites (#99489) Relates to #98698 Add asserts that we aren't returning NaN or Infinite values from scalar functions. I also fixed the functions that already had test cases returning NaN/Infinite values. Note that this doesn't fix all our scalar functions, as some may just not have test cases that hit these outputs regularly, which is why this PR doesn't close the above issue. 
--- .../src/main/resources/floats.csv-spec | 12 +++-- .../src/main/resources/math.csv-spec | 8 +-- .../function/scalar/math/AcosEvaluator.java | 28 ++++++++--- .../function/scalar/math/AsinEvaluator.java | 28 ++++++++--- .../function/scalar/math/CoshEvaluator.java | 28 ++++++++--- .../function/scalar/math/SinhEvaluator.java | 28 ++++++++--- .../scalar/math/SqrtDoubleEvaluator.java | 28 ++++++++--- .../scalar/math/SqrtIntEvaluator.java | 29 ++++++++--- .../scalar/math/SqrtLongEvaluator.java | 29 ++++++++--- .../expression/function/scalar/math/Acos.java | 7 ++- .../expression/function/scalar/math/Asin.java | 7 ++- .../expression/function/scalar/math/Cosh.java | 10 ++-- .../expression/function/scalar/math/Sinh.java | 10 ++-- .../expression/function/scalar/math/Sqrt.java | 21 +++++--- .../function/AbstractFunctionTestCase.java | 4 ++ .../expression/function/TestCaseSupplier.java | 12 ++--- .../function/scalar/math/AcosTests.java | 39 ++++++++++++--- .../function/scalar/math/AsinTests.java | 39 ++++++++++++--- .../function/scalar/math/CoshTests.java | 35 +++++++++++-- .../function/scalar/math/SinhTests.java | 35 +++++++++++-- .../function/scalar/math/SqrtTests.java | 50 +++++++++++++++++-- 21 files changed, 387 insertions(+), 100 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 3ecb31722277c..ed1064b5b57f1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -269,13 +269,15 @@ a:double | acos:double // end::acos-result[] ; -acosNan +acosNull ROW a=12.0 | EVAL acos=ACOS(a) ; +warning:Line 2:13: evaluation of [ACOS(a)] failed, treating result as null. Only first 20 failures recorded. 
+warning:java.lang.ArithmeticException: Acos input out of range a:double | acos:double - 12 | NaN + 12 | null ; sin @@ -317,13 +319,15 @@ a:double | asin:double // end::asin-result[] ; -asinNan +asinNull ROW a=12.0 | EVAL asin=ASIN(a) ; +warning:Line 2:13: evaluation of [ASIN(a)] failed, treating result as null. Only first 20 failures recorded. +warning:java.lang.ArithmeticException: Asin input out of range a:double | asin:double - 12 | NaN + 12 | null ; tan diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index 93b7ebe2156ed..ebd54c4e384a7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -1019,10 +1019,12 @@ l:ul | s:double ; sqrtOfNegative -row d = -1.0 | eval s = is_nan(sqrt(d)); +row d = -1.0 | eval s = sqrt(d); +warning:Line 1:25: evaluation of [sqrt(d)] failed, treating result as null. Only first 20 failures recorded. +warning:java.lang.ArithmeticException: Square root of negative -d:double | s:boolean --1.0 | true +d:double | s:double +-1.0 | null ; sqrtOfNan diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java index db47de5027f07..3d95122009b7d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -11,15 +12,20 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Acos}. * This class is generated. Do not edit it. */ public final class AcosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; - public AcosEvaluator(EvalOperator.ExpressionEvaluator val) { + public AcosEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + this.warnings = new Warnings(source); this.val = val; } @@ -34,7 +40,7 @@ public Block eval(Page page) { if (valVector == null) { return eval(page.getPositionCount(), valBlock); } - return eval(page.getPositionCount(), valVector).asBlock(); + return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { @@ -44,15 +50,25 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { result.appendNull(); continue position; } - result.appendDouble(Acos.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + try { + result.appendDouble(Acos.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } - public DoubleVector eval(int positionCount, DoubleVector valVector) { - DoubleVector.Builder result = DoubleVector.newVectorBuilder(positionCount); + public DoubleBlock eval(int positionCount, DoubleVector valVector) { + DoubleBlock.Builder result = 
DoubleBlock.newBlockBuilder(positionCount); position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Acos.process(valVector.getDouble(p))); + try { + result.appendDouble(Acos.process(valVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java index 0c7cae266b348..61cced3385905 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -11,15 +12,20 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Asin}. * This class is generated. Do not edit it. 
*/ public final class AsinEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; - public AsinEvaluator(EvalOperator.ExpressionEvaluator val) { + public AsinEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + this.warnings = new Warnings(source); this.val = val; } @@ -34,7 +40,7 @@ public Block eval(Page page) { if (valVector == null) { return eval(page.getPositionCount(), valBlock); } - return eval(page.getPositionCount(), valVector).asBlock(); + return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { @@ -44,15 +50,25 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { result.appendNull(); continue position; } - result.appendDouble(Asin.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + try { + result.appendDouble(Asin.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } - public DoubleVector eval(int positionCount, DoubleVector valVector) { - DoubleVector.Builder result = DoubleVector.newVectorBuilder(positionCount); + public DoubleBlock eval(int positionCount, DoubleVector valVector) { + DoubleBlock.Builder result = DoubleBlock.newBlockBuilder(positionCount); position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Asin.process(valVector.getDouble(p))); + try { + result.appendDouble(Asin.process(valVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java index 
c0e3a1937bb26..2ff9dc6e8ef4d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -11,15 +12,20 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cosh}. * This class is generated. Do not edit it. */ public final class CoshEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; - public CoshEvaluator(EvalOperator.ExpressionEvaluator val) { + public CoshEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + this.warnings = new Warnings(source); this.val = val; } @@ -34,7 +40,7 @@ public Block eval(Page page) { if (valVector == null) { return eval(page.getPositionCount(), valBlock); } - return eval(page.getPositionCount(), valVector).asBlock(); + return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { @@ -44,15 +50,25 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { result.appendNull(); continue position; } - result.appendDouble(Cosh.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + try { + result.appendDouble(Cosh.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + 
result.appendNull(); + } } return result.build(); } - public DoubleVector eval(int positionCount, DoubleVector valVector) { - DoubleVector.Builder result = DoubleVector.newVectorBuilder(positionCount); + public DoubleBlock eval(int positionCount, DoubleVector valVector) { + DoubleBlock.Builder result = DoubleBlock.newBlockBuilder(positionCount); position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cosh.process(valVector.getDouble(p))); + try { + result.appendDouble(Cosh.process(valVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java index da5ce241e645c..43ec78d3289f8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -11,15 +12,20 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sinh}. * This class is generated. Do not edit it. 
*/ public final class SinhEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; - public SinhEvaluator(EvalOperator.ExpressionEvaluator val) { + public SinhEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + this.warnings = new Warnings(source); this.val = val; } @@ -34,7 +40,7 @@ public Block eval(Page page) { if (valVector == null) { return eval(page.getPositionCount(), valBlock); } - return eval(page.getPositionCount(), valVector).asBlock(); + return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { @@ -44,15 +50,25 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { result.appendNull(); continue position; } - result.appendDouble(Sinh.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + try { + result.appendDouble(Sinh.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } - public DoubleVector eval(int positionCount, DoubleVector valVector) { - DoubleVector.Builder result = DoubleVector.newVectorBuilder(positionCount); + public DoubleBlock eval(int positionCount, DoubleVector valVector) { + DoubleBlock.Builder result = DoubleBlock.newBlockBuilder(positionCount); position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Sinh.process(valVector.getDouble(p))); + try { + result.appendDouble(Sinh.process(valVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java index 
3efce9f4f0f98..c60176d3e7135 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -11,15 +12,20 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}. * This class is generated. Do not edit it. */ public final class SqrtDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; - public SqrtDoubleEvaluator(EvalOperator.ExpressionEvaluator val) { + public SqrtDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + this.warnings = new Warnings(source); this.val = val; } @@ -34,7 +40,7 @@ public Block eval(Page page) { if (valVector == null) { return eval(page.getPositionCount(), valBlock); } - return eval(page.getPositionCount(), valVector).asBlock(); + return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { @@ -44,15 +50,25 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { result.appendNull(); continue position; } - result.appendDouble(Sqrt.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + try { + result.appendDouble(Sqrt.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + 
warnings.registerException(e); + result.appendNull(); + } } return result.build(); } - public DoubleVector eval(int positionCount, DoubleVector valVector) { - DoubleVector.Builder result = DoubleVector.newVectorBuilder(positionCount); + public DoubleBlock eval(int positionCount, DoubleVector valVector) { + DoubleBlock.Builder result = DoubleBlock.newBlockBuilder(positionCount); position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Sqrt.process(valVector.getDouble(p))); + try { + result.appendDouble(Sqrt.process(valVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java index a4ab65c58f151..1241e35e8f5db 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java @@ -4,24 +4,29 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}. 
* This class is generated. Do not edit it. */ public final class SqrtIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; - public SqrtIntEvaluator(EvalOperator.ExpressionEvaluator val) { + public SqrtIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + this.warnings = new Warnings(source); this.val = val; } @@ -36,7 +41,7 @@ public Block eval(Page page) { if (valVector == null) { return eval(page.getPositionCount(), valBlock); } - return eval(page.getPositionCount(), valVector).asBlock(); + return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, IntBlock valBlock) { @@ -46,15 +51,25 @@ public DoubleBlock eval(int positionCount, IntBlock valBlock) { result.appendNull(); continue position; } - result.appendDouble(Sqrt.process(valBlock.getInt(valBlock.getFirstValueIndex(p)))); + try { + result.appendDouble(Sqrt.process(valBlock.getInt(valBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } - public DoubleVector eval(int positionCount, IntVector valVector) { - DoubleVector.Builder result = DoubleVector.newVectorBuilder(positionCount); + public DoubleBlock eval(int positionCount, IntVector valVector) { + DoubleBlock.Builder result = DoubleBlock.newBlockBuilder(positionCount); position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Sqrt.process(valVector.getInt(p))); + try { + result.appendDouble(Sqrt.process(valVector.getInt(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java index 28939040d0dfc..8dc27fada343a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java @@ -4,24 +4,29 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}. * This class is generated. Do not edit it. 
*/ public final class SqrtLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; - public SqrtLongEvaluator(EvalOperator.ExpressionEvaluator val) { + public SqrtLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + this.warnings = new Warnings(source); this.val = val; } @@ -36,7 +41,7 @@ public Block eval(Page page) { if (valVector == null) { return eval(page.getPositionCount(), valBlock); } - return eval(page.getPositionCount(), valVector).asBlock(); + return eval(page.getPositionCount(), valVector); } public DoubleBlock eval(int positionCount, LongBlock valBlock) { @@ -46,15 +51,25 @@ public DoubleBlock eval(int positionCount, LongBlock valBlock) { result.appendNull(); continue position; } - result.appendDouble(Sqrt.process(valBlock.getLong(valBlock.getFirstValueIndex(p)))); + try { + result.appendDouble(Sqrt.process(valBlock.getLong(valBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } - public DoubleVector eval(int positionCount, LongVector valVector) { - DoubleVector.Builder result = DoubleVector.newVectorBuilder(positionCount); + public DoubleBlock eval(int positionCount, LongVector valVector) { + DoubleBlock.Builder result = DoubleBlock.newBlockBuilder(positionCount); position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Sqrt.process(valVector.getLong(p))); + try { + result.appendDouble(Sqrt.process(valVector.getLong(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java index 75fb4571c9ff6..c81d6d0dbea46 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java @@ -26,7 +26,7 @@ public Acos(Source source, @Named("n") Expression n) { @Override protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new AcosEvaluator(field); + return new AcosEvaluator(source(), field); } @Override @@ -39,8 +39,11 @@ protected NodeInfo info() { return NodeInfo.create(this, Acos::new, field()); } - @Evaluator + @Evaluator(warnExceptions = ArithmeticException.class) static double process(double val) { + if (Math.abs(val) > 1) { + throw new ArithmeticException("Acos input out of range"); + } return Math.acos(val); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java index cc964c92f7c61..c40b1b0004f5b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java @@ -26,7 +26,7 @@ public Asin(Source source, @Named("n") Expression n) { @Override protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new AsinEvaluator(field); + return new AsinEvaluator(source(), field); } @Override @@ -39,8 +39,11 @@ protected NodeInfo info() { return NodeInfo.create(this, Asin::new, field()); } - @Evaluator + @Evaluator(warnExceptions = ArithmeticException.class) static double process(double val) { + if (Math.abs(val) > 1) { + throw new ArithmeticException("Asin input out of range"); + } return Math.asin(val); } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index 5f9e72e80d097..0652509a78144 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -26,7 +26,7 @@ public Cosh(Source source, @Named("n") Expression n) { @Override protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new CoshEvaluator(field); + return new CoshEvaluator(source(), field); } @Override @@ -39,8 +39,12 @@ protected NodeInfo info() { return NodeInfo.create(this, Cosh::new, field()); } - @Evaluator + @Evaluator(warnExceptions = ArithmeticException.class) static double process(double val) { - return Math.cosh(val); + double res = Math.cosh(val); + if (Double.isNaN(res) || Double.isInfinite(res)) { + throw new ArithmeticException("cosh overflow"); + } + return res; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java index 4184ee1c99b83..a790b6c52c184 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java @@ -26,7 +26,7 @@ public Sinh(Source source, @Named("n") Expression n) { @Override protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new SinhEvaluator(field); + return new SinhEvaluator(source(), field); } @Override @@ -39,8 +39,12 @@ protected NodeInfo info() { return NodeInfo.create(this, Sinh::new, field()); } - 
@Evaluator + @Evaluator(warnExceptions = ArithmeticException.class) static double process(double val) { - return Math.sinh(val); + double res = Math.sinh(val); + if (Double.isNaN(res) || Double.isInfinite(res)) { + throw new ArithmeticException("sinh overflow"); + } + return res; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index f20e2d81ca749..b2386bdd9abad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -41,13 +41,13 @@ public Supplier toEvaluator( var eval = field.get(); if (fieldType == DataTypes.DOUBLE) { - return () -> new SqrtDoubleEvaluator(eval); + return () -> new SqrtDoubleEvaluator(source(), eval); } if (fieldType == DataTypes.INTEGER) { - return () -> new SqrtIntEvaluator(eval); + return () -> new SqrtIntEvaluator(source(), eval); } if (fieldType == DataTypes.LONG) { - return () -> new SqrtLongEvaluator(eval); + return () -> new SqrtLongEvaluator(source(), eval); } if (fieldType == DataTypes.UNSIGNED_LONG) { return () -> new SqrtUnsignedLongEvaluator(eval); @@ -56,13 +56,19 @@ public Supplier toEvaluator( throw EsqlIllegalArgumentException.illegalDataType(fieldType); } - @Evaluator(extraName = "Double") + @Evaluator(extraName = "Double", warnExceptions = ArithmeticException.class) static double process(double val) { + if (val < 0) { + throw new ArithmeticException("Square root of negative"); + } return Math.sqrt(val); } - @Evaluator(extraName = "Long") + @Evaluator(extraName = "Long", warnExceptions = ArithmeticException.class) static double process(long val) { + if (val < 0) { + throw new ArithmeticException("Square root of negative"); + } return Math.sqrt(val); } @@ -71,8 +77,11 @@ static 
double processUnsignedLong(long val) { return Math.sqrt(NumericUtils.unsignedLongToDouble(val)); } - @Evaluator(extraName = "Int") + @Evaluator(extraName = "Int", warnExceptions = ArithmeticException.class) static double process(int val) { + if (val < 0) { + throw new ArithmeticException("Square root of negative"); + } return Math.sqrt(val); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 83900b5d1bd77..1db97943dfdad 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -64,6 +64,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; /** @@ -179,6 +180,9 @@ public final void testEvaluate() { assertThat(expression.dataType(), equalTo(testCase.expectedType)); // TODO should we convert unsigned_long into BigDecimal so it's easier to assert? 
Object result = toJavaObject(evaluator(expression).get().eval(row(testCase.getDataValues())), 0); + assertThat(result, not(equalTo(Double.NaN))); + assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); + assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); assertThat(result, testCase.getMatcher()); if (testCase.getExpectedWarnings() != null) { assertWarnings(testCase.getExpectedWarnings()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 0af3d11c6065d..073f82847e87a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -28,11 +28,11 @@ import java.util.List; import java.util.function.DoubleBinaryOperator; import java.util.function.DoubleFunction; -import java.util.function.DoubleUnaryOperator; import java.util.function.Function; import java.util.function.IntFunction; import java.util.function.LongFunction; import java.util.function.Supplier; +import java.util.function.UnaryOperator; import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; @@ -89,7 +89,7 @@ public String toString() { public static List forUnaryCastingToDouble( String name, String argName, - DoubleUnaryOperator expected, + UnaryOperator expected, Double min, Double max, List warnings @@ -101,7 +101,7 @@ public static List forUnaryCastingToDouble( suppliers, eval + castToDoubleEvaluator(read, DataTypes.INTEGER) + "]", DataTypes.DOUBLE, - i -> expected.applyAsDouble(i), + i -> expected.apply(Double.valueOf(i)), min.intValue(), max.intValue(), warnings @@ -110,7 +110,7 @@ public static List forUnaryCastingToDouble( suppliers, eval + castToDoubleEvaluator(read, DataTypes.LONG) + "]", DataTypes.DOUBLE, - l -> 
expected.applyAsDouble(l), + i -> expected.apply(Double.valueOf(i)), min.longValue(), max.longValue(), warnings @@ -119,12 +119,12 @@ public static List forUnaryCastingToDouble( suppliers, eval + castToDoubleEvaluator(read, DataTypes.UNSIGNED_LONG) + "]", DataTypes.DOUBLE, - ul -> expected.applyAsDouble(ul.doubleValue()), + ul -> expected.apply(ul.doubleValue()), BigInteger.valueOf((int) Math.ceil(min)), BigInteger.valueOf((int) Math.floor(max)), warnings ); - forUnaryDouble(suppliers, eval + read + "]", DataTypes.DOUBLE, i -> expected.applyAsDouble(i), min, max, warnings); + forUnaryDouble(suppliers, eval + read + "]", DataTypes.DOUBLE, expected::apply, min, max, warnings); return suppliers; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java index 12bc9c48827f5..8f1b30b2de008 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java @@ -25,15 +25,38 @@ public AcosTests(@Name("TestCase") Supplier testCaseS @ParametersFactory public static Iterable parameters() { - List suppliers = TestCaseSupplier.forUnaryCastingToDouble( - "AcosEvaluator", - "val", - Math::acos, - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, - List.of() + // values in range + List suppliers = TestCaseSupplier.forUnaryCastingToDouble("AcosEvaluator", "val", Math::acos, -1d, 1d, List.of()); + suppliers = anyNullIsNull(true, suppliers); + + // Values out of range + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "AcosEvaluator", + "val", + k -> null, + Double.NEGATIVE_INFINITY, + Math.nextDown(-1d), + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "java.lang.ArithmeticException: Acos input out of range" + ) + ) + ); + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "AcosEvaluator", + "val", + k -> null, + Math.nextUp(1d), + Double.POSITIVE_INFINITY, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "java.lang.ArithmeticException: Acos input out of range" + ) + ) ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java index 7cba8e88940c6..f6df0f33cae59 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java @@ -25,15 +25,38 @@ public AsinTests(@Name("TestCase") Supplier testCaseS @ParametersFactory public static Iterable parameters() { - List suppliers = TestCaseSupplier.forUnaryCastingToDouble( - "AsinEvaluator", - "val", - Math::asin, - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, - List.of() + // values in range + List suppliers = TestCaseSupplier.forUnaryCastingToDouble("AsinEvaluator", "val", Math::asin, -1d, 1d, List.of()); + suppliers = anyNullIsNull(true, suppliers); + + // Values out of range + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "AsinEvaluator", + "val", + k -> null, + Double.NEGATIVE_INFINITY, + Math.nextDown(-1d), + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "java.lang.ArithmeticException: Asin input out of range" + ) + ) + ); + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "AsinEvaluator", + "val", + k -> null, + Math.nextUp(1d), + Double.POSITIVE_INFINITY, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "java.lang.ArithmeticException: Asin input out of range" + ) + ) ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java index 2a1e81b60a02f..c1fbc63e23d76 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java @@ -29,11 +29,40 @@ public static Iterable parameters() { "CoshEvaluator", "val", Math::cosh, - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, + -710d, + 710d, // Hyperbolic Cosine grows extremely fast. Values outside this range return Double.POSITIVE_INFINITY List.of() ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + suppliers = anyNullIsNull(true, suppliers); + + // Out of range cases + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "CoshEvaluator", + "val", + k -> null, + Double.NEGATIVE_INFINITY, + -711d, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "java.lang.ArithmeticException: cosh overflow" + ) + ) + ); + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "CoshEvaluator", + "val", + k -> null, + 711d, + Double.POSITIVE_INFINITY, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "java.lang.ArithmeticException: cosh overflow" + ) + ) + ); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java index aad1e35a09da4..f852eaecf0861 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java @@ -29,11 +29,40 @@ public static Iterable parameters() { "SinhEvaluator", "val", Math::sinh, - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, + -710d, + 710d, // Hyperbolic sine grows extremely fast. Values outside this range return Double.POSITIVE_INFINITY List.of() ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + suppliers = anyNullIsNull(true, suppliers); + + // Out of range cases + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "SinhEvaluator", + "val", + k -> null, + Double.NEGATIVE_INFINITY, + -711d, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "java.lang.ArithmeticException: sinh overflow" + ) + ) + ); + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "SinhEvaluator", + "val", + k -> null, + 711d, + Double.POSITIVE_INFINITY, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "java.lang.ArithmeticException: sinh overflow" + ) + ) + ); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index 4e7b08dcf63be..d5430c6be7a81 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -31,12 +31,13 @@ public SqrtTests(@Name("TestCase") Supplier testCaseS public static Iterable parameters() { String read = "Attribute[channel=0]"; List suppliers = new ArrayList<>(); + // Valid values TestCaseSupplier.forUnaryInt( suppliers, "SqrtIntEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::sqrt, - Integer.MIN_VALUE, + 0, Integer.MAX_VALUE, List.of() ); @@ -45,7 +46,7 @@ public static Iterable parameters() { "SqrtLongEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::sqrt, - Long.MIN_VALUE, + 0, Long.MAX_VALUE, List.of() ); @@ -63,11 +64,50 @@ public static Iterable parameters() { "SqrtDoubleEvaluator[val=" + read + "]", DataTypes.DOUBLE, Math::sqrt, - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, + -0d, + Double.MAX_VALUE, List.of() ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + suppliers = anyNullIsNull(true, suppliers); + + // Out of range values (there are no out of range unsigned longs) + TestCaseSupplier.forUnaryInt( + suppliers, + "SqrtIntEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + k -> null, + Integer.MIN_VALUE, + -1, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "java.lang.ArithmeticException: Square root of negative" + ) + ); + TestCaseSupplier.forUnaryLong( + suppliers, + "SqrtLongEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + k -> null, + Long.MIN_VALUE, + -1, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "java.lang.ArithmeticException: Square root of negative" + ) + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "SqrtDoubleEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + k -> null, + Double.NEGATIVE_INFINITY, + -Double.MIN_VALUE, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "java.lang.ArithmeticException: Square root of negative" + ) + ); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); } @Override From abee4d3ea86262156d0b57c313e2401456f6179d Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 12 Sep 2023 21:52:34 +0100 Subject: [PATCH 026/114] Inference API (#99224) Adds the _inference API for managing inference models and performing inference. Inference is a new plugin in XPack that creates a new system index (.inference) for storing the model configurations. Models configurations are managed with the standard PUT, GET, DELETE requests and POST to perform inference. This PR creates an inference service for deploying and inferring on the ELSER model. 
--- docs/changelog/99224.yaml | 5 + .../api/inference.delete_model.json | 33 +++ .../api/inference.get_model.json | 33 +++ .../api/inference.inference.json | 36 +++ .../api/inference.put_model.json | 36 +++ .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/core/ClientHelper.java | 1 + x-pack/plugin/inference/build.gradle | 27 +++ .../integration/ModelRegistryIT.java | 217 ++++++++++++++++++ .../inference/src/main/java/module-info.java | 22 ++ .../xpack/inference/InferenceIndex.java | 96 ++++++++ .../InferenceNamedWriteablesProvider.java | 41 ++++ .../xpack/inference/InferencePlugin.java | 158 +++++++++++++ .../elasticsearch/xpack/inference/Model.java | 123 ++++++++++ .../xpack/inference/ServiceSettings.java | 15 ++ .../xpack/inference/TaskSettings.java | 13 ++ .../xpack/inference/TaskType.java | 52 +++++ .../xpack/inference/UnparsedModel.java | 33 +++ .../action/DeleteInferenceModelAction.java | 79 +++++++ .../action/GetInferenceModelAction.java | 78 +++++++ .../inference/action/InferenceAction.java | 208 +++++++++++++++++ .../action/PutInferenceModelAction.java | 145 ++++++++++++ .../TransportDeleteInferenceModelAction.java | 68 ++++++ .../TransportGetInferenceModelAction.java | 65 ++++++ .../action/TransportInferenceAction.java | 94 ++++++++ .../TransportPutInferenceModelAction.java | 119 ++++++++++ .../inference/registry/ModelRegistry.java | 116 ++++++++++ .../inference/registry/ServiceRegistry.java | 31 +++ .../rest/RestDeleteInferenceModelAction.java | 39 ++++ .../rest/RestGetInferenceModelAction.java | 40 ++++ .../inference/rest/RestInferenceAction.java | 40 ++++ .../rest/RestPutInferenceModelAction.java | 40 ++++ .../inference/results/InferenceResult.java | 13 ++ .../results/SparseEmbeddingResult.java | 82 +++++++ .../inference/services/InferenceService.java | 64 ++++++ .../inference/services/MapParsingUtils.java | 70 ++++++ .../services/elser/ElserMlNodeModel.java | 34 +++ .../services/elser/ElserMlNodeService.java | 155 +++++++++++++ 
.../elser/ElserMlNodeServiceSettings.java | 126 ++++++++++ .../elser/ElserMlNodeTaskSettings.java | 63 +++++ .../xpack/inference/ModelTests.java | 79 +++++++ .../action/GetInferenceModelRequestTests.java | 41 ++++ .../action/InferenceActionRequestTests.java | 70 ++++++ .../action/InferenceActionResponseTests.java | 39 ++++ .../action/PutInferenceModelRequestTests.java | 61 +++++ .../PutInferenceModelResponseTests.java | 38 +++ .../registry/ServiceRegistryTests.java | 28 +++ .../results/SparseEmbeddingResultTests.java | 47 ++++ .../services/MapParsingUtilsTests.java | 91 ++++++++ .../ElserMlNodeServiceSettingsTests.java | 76 ++++++ .../elser/ElserMlNodeServiceTests.java | 28 +++ .../elser/ElserMlNodeTaskSettingsTests.java | 33 +++ .../xpack/security/operator/Constants.java | 4 + .../security/authz/AuthorizationUtils.java | 2 + .../test/inference/inference_crud.yml | 41 ++++ 55 files changed, 3389 insertions(+) create mode 100644 docs/changelog/99224.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete_model.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_model.json create mode 100644 x-pack/plugin/inference/build.gradle create mode 100644 x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java create mode 100644 x-pack/plugin/inference/src/main/java/module-info.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceIndex.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/Model.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/ServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/TaskSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/TaskType.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/DeleteInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/GetInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ServiceRegistry.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/InferenceResult.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResult.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/InferenceService.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java create mode 100644 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ServiceRegistryTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettingsTests.java create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml diff --git a/docs/changelog/99224.yaml b/docs/changelog/99224.yaml new file mode 100644 index 0000000000000..cde4084ab0e84 --- /dev/null +++ b/docs/changelog/99224.yaml @@ -0,0 +1,5 @@ +pr: 99224 +summary: Add new _inference API +area: Machine Learning +type: enhancement +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete_model.json new file mode 100644 index 0000000000000..09623569435e2 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete_model.json @@ -0,0 +1,33 @@ +{ + "inference.delete_model":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/inference_delete_model.html", + "description":"Delete model in the Inference API" + }, + 
"stability":"experimental", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_inference/{task_type}/{model_id}", + "methods":[ + "DELETE" + ], + "parts":{ + "task_type":{ + "type":"string", + "description":"The model task type" + }, + "model_id":{ + "type":"string", + "description":"The model Id" + } + } + } + ] + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json new file mode 100644 index 0000000000000..33626e21d87e7 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json @@ -0,0 +1,33 @@ +{ + "inference.get_model":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/inference_get_model.html", + "description":"Get a model in the Inference API" + }, + "stability":"experimental", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_inference/{task_type}/{model_id}", + "methods":[ + "GET" + ], + "parts":{ + "task_type":{ + "type":"string", + "description":"The model task type" + }, + "model_id":{ + "type":"string", + "description":"The model Id" + } + } + } + ] + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json new file mode 100644 index 0000000000000..7b7f3e21b83c3 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json @@ -0,0 +1,36 @@ +{ + "inference.inference":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/inference.html", + "description":"Perform inference on a model" + }, + "stability":"experimental", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + 
"path":"/_inference/{task_type}/{model_id}", + "methods":[ + "POST" + ], + "parts":{ + "task_type":{ + "type":"string", + "description":"The model task type" + }, + "model_id":{ + "type":"string", + "description":"The model Id" + } + } + } + ] + }, + "body":{ + "description":"The inference payload" + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_model.json new file mode 100644 index 0000000000000..aab1a9d41b70c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_model.json @@ -0,0 +1,36 @@ +{ + "inference.put_model":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/inference_put_model.html", + "description":"Configure a model for use in the Inference API" + }, + "stability":"experimental", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_inference/{task_type}/{model_id}", + "methods":[ + "PUT" + ], + "parts":{ + "task_type":{ + "type":"string", + "description":"The model task type" + }, + "model_id":{ + "type":"string", + "description":"The model Id" + } + } + } + ] + }, + "body":{ + "description":"The model's task and service settings" + } + } +} diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 3bc2bd7ce8499..0c16ee5cb068b 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -137,6 +137,7 @@ static TransportVersion def(int id, String uniqueId) { public static final TransportVersion V_8_500_071 = def(8_500_071, "a86dfc08-3026-4f01-90ef-6d6de003e217"); public static final TransportVersion V_8_500_072 = def(8_500_072, "e2df7d80-7b74-4afd-9734-aee0fc256025"); public static final TransportVersion V_8_500_073 = def(8_500_073, 
"9128e16a-e4f7-41c4-b04f-842955bfc1b4"); + public static final TransportVersion V_8_500_074 = def(8_500_074, "aab0c31c-62d7-4b95-bb29-0b6f367ece64"); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index 4cf8c72c087ca..fc81fc2f1eb75 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -192,6 +192,7 @@ private static String maybeRewriteSingleAuthenticationHeaderForVersion( public static final String LOGSTASH_MANAGEMENT_ORIGIN = "logstash_management"; public static final String FLEET_ORIGIN = "fleet"; public static final String ENT_SEARCH_ORIGIN = "enterprise_search"; + public static final String INFERENCE_ORIGIN = "inference"; private ClientHelper() {} diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle new file mode 100644 index 0000000000000..8222eb807d3ee --- /dev/null +++ b/x-pack/plugin/inference/build.gradle @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.OS + +apply plugin: 'elasticsearch.internal-es-plugin' +apply plugin: 'elasticsearch.internal-cluster-test' + +esplugin { + name 'inference' + description 'Configuration and evaluation of inference models' + classname 'org.elasticsearch.xpack.inference.InferencePlugin' + extendedPlugins = ['x-pack-core'] +} + +dependencies { + implementation project(path: ':libs:elasticsearch-logging') + compileOnly project(":server") + compileOnly project(path: xpackModule('core')) + testImplementation(testArtifact(project(xpackModule('core')))) + testImplementation project(':modules:reindex') +} diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java new file mode 100644 index 0000000000000..fbb6bb7e316ff --- /dev/null +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -0,0 +1,217 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.integration; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.Model; +import org.elasticsearch.xpack.inference.ServiceSettings; +import org.elasticsearch.xpack.inference.TaskSettings; +import org.elasticsearch.xpack.inference.TaskType; +import org.elasticsearch.xpack.inference.UnparsedModel; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeModel; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceTests; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettingsTests; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collection; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; + +public class ModelRegistryIT extends ESSingleNodeTestCase { + + private ModelRegistry modelRegistry; + + @Before + public void createComponents() { + modelRegistry = new ModelRegistry(client()); + } + + @Override + protected Collection> getPlugins() { + return pluginList(ReindexPlugin.class, InferencePlugin.class); + } + + public void testStoreModel() throws 
Exception { + String modelId = "test-store-model"; + Model model = buildModelConfig(modelId, ElserMlNodeService.NAME, TaskType.SPARSE_EMBEDDING); + AtomicReference storeModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + blockingCall(listener -> modelRegistry.storeModel(model, listener), storeModelHolder, exceptionHolder); + + assertThat(storeModelHolder.get(), is(true)); + assertThat(exceptionHolder.get(), is(nullValue())); + } + + public void testStoreModelWithUnknownFields() throws Exception { + String modelId = "test-store-model-unknown-field"; + Model model = buildModelWithUnknownField(modelId); + AtomicReference storeModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + blockingCall(listener -> modelRegistry.storeModel(model, listener), storeModelHolder, exceptionHolder); + + assertNull(storeModelHolder.get()); + assertNotNull(exceptionHolder.get()); + assertThat(exceptionHolder.get(), instanceOf(ElasticsearchStatusException.class)); + ElasticsearchStatusException statusException = (ElasticsearchStatusException) exceptionHolder.get(); + assertThat( + statusException.getRootCause().getMessage(), + containsString("mapping set to strict, dynamic introduction of [unknown_field] within [_doc] is not allowed") + ); + assertThat(exceptionHolder.get().getMessage(), containsString("Failed to store inference model [" + modelId + "]")); + } + + public void testGetModel() throws Exception { + String modelId = "test-get-model"; + Model model = buildModelConfig(modelId, ElserMlNodeService.NAME, TaskType.SPARSE_EMBEDDING); + AtomicReference putModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + + // now get the model + AtomicReference modelHolder = new AtomicReference<>(); + 
blockingCall(listener -> modelRegistry.getUnparsedModelMap(modelId, listener), modelHolder, exceptionHolder); + assertThat(exceptionHolder.get(), is(nullValue())); + assertThat(modelHolder.get(), not(nullValue())); + + UnparsedModel unparsedModel = UnparsedModel.unparsedModelFromMap(modelHolder.get().config()); + assertEquals(model.getService(), unparsedModel.service()); + ElserMlNodeModel roundTripModel = ElserMlNodeService.parseConfig( + false, + unparsedModel.modelId(), + unparsedModel.taskType(), + unparsedModel.settings() + ); + assertEquals(model, roundTripModel); + } + + public void testStoreModelFailsWhenModelExists() throws Exception { + String modelId = "test-put-trained-model-config-exists"; + Model model = buildModelConfig(modelId, ElserMlNodeService.NAME, TaskType.SPARSE_EMBEDDING); + AtomicReference putModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + assertThat(exceptionHolder.get(), is(nullValue())); + + putModelHolder.set(false); + // an model with the same id exists + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(false)); + assertThat(exceptionHolder.get(), not(nullValue())); + assertThat( + exceptionHolder.get().getMessage(), + containsString("Inference model [test-put-trained-model-config-exists] already exists") + ); + } + + public void testDeleteModel() throws Exception { + // put models + for (var id : new String[] { "model1", "model2", "model3" }) { + Model model = buildModelConfig(id, ElserMlNodeService.NAME, TaskType.SPARSE_EMBEDDING); + AtomicReference putModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, 
exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + } + + AtomicReference deleteResponseHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.deleteModel("model1", listener), deleteResponseHolder, exceptionHolder); + assertThat(exceptionHolder.get(), is(nullValue())); + assertTrue(deleteResponseHolder.get()); + + // get should fail + deleteResponseHolder.set(false); + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getUnparsedModelMap("model1", listener), modelHolder, exceptionHolder); + + assertThat(exceptionHolder.get(), not(nullValue())); + assertFalse(deleteResponseHolder.get()); + assertThat(exceptionHolder.get().getMessage(), containsString("Model not found [model1]")); + } + + private Model buildModelConfig(String modelId, String service, TaskType taskType) { + return switch (service) { + case ElserMlNodeService.NAME -> ElserMlNodeServiceTests.randomModelConfig(modelId, taskType); + default -> throw new IllegalArgumentException("unknown service " + service); + }; + } + + protected void blockingCall(Consumer> function, AtomicReference response, AtomicReference error) + throws InterruptedException { + CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = ActionListener.wrap(r -> { + response.set(r); + latch.countDown(); + }, e -> { + error.set(e); + latch.countDown(); + }); + + function.accept(listener); + latch.await(); + } + + private static ModelWithUnknownField buildModelWithUnknownField(String modelId) { + return new ModelWithUnknownField( + modelId, + TaskType.SPARSE_EMBEDDING, + ElserMlNodeService.NAME, + ElserMlNodeServiceSettingsTests.createRandom(), + ElserMlNodeTaskSettingsTests.createRandom() + ); + } + + private static class ModelWithUnknownField extends Model { + + ModelWithUnknownField( + String modelId, + TaskType taskType, + String service, + ServiceSettings serviceSettings, + 
TaskSettings taskSettings + ) { + super(modelId, taskType, service, serviceSettings, taskSettings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("unknown_field", "foo"); + builder.field(MODEL_ID, getModelId()); + builder.field(TaskType.NAME, getTaskType().toString()); + builder.field(SERVICE, getService()); + builder.field(SERVICE_SETTINGS, getServiceSettings()); + builder.field(TASK_SETTINGS, getTaskSettings()); + builder.endObject(); + return builder; + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java new file mode 100644 index 0000000000000..e80d828e4e48d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +module org.elasticsearch.xpack.inference { + requires org.elasticsearch.base; + requires org.elasticsearch.server; + requires org.elasticsearch.xcontent; + requires org.elasticsearch.xcore; + + requires org.apache.httpcomponents.httpclient; + requires org.apache.logging.log4j; + + exports org.elasticsearch.xpack.inference.rest; + exports org.elasticsearch.xpack.inference.action; + exports org.elasticsearch.xpack.inference.registry; + exports org.elasticsearch.xpack.inference.results; + exports org.elasticsearch.xpack.inference; +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceIndex.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceIndex.java new file mode 100644 index 0000000000000..b416e1cfdaf83 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceIndex.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.io.UncheckedIOException; + +import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; + +public class InferenceIndex { + + private InferenceIndex() {} + + public static final String INDEX_NAME = ".inference"; + public static final String INDEX_PATTERN = INDEX_NAME + "*"; + + // Increment this version number when the mappings change + private static final int INDEX_MAPPING_VERSION = 1; + + public static Settings settings() { + return Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") + .build(); + } + + /** + * Reject any unknown fields being added by setting dynamic mappings to + * {@code strict} for the top level object. A document that contains unknown + * fields in the document root will be rejected at index time. + * + * The {@code service_settings} and {@code task_settings} objects + * have dynamic mappings set to {@code false} which means all fields will + * be accepted without throwing an error but those fields are not indexed. + * + * The reason for mixing {@code strict} and {@code false} dynamic settings + * is that {@code service_settings} and {@code task_settings} are defined by + * the inference services and therefore are not known when creating the + * index. However, the top level settings are known in advance and can + * be strictly mapped. 
+ * + * If the top level strict mapping changes then no new documents should + be indexed until the index mappings have been updated, this happens + automatically once all nodes in the cluster are of a compatible version. + * + * @return The index mappings + */ + public static XContentBuilder mappings() { + try { + return jsonBuilder().startObject() + .startObject(SINGLE_MAPPING_NAME) + .startObject("_meta") + .field("version", Version.CURRENT) + .field(SystemIndexDescriptor.VERSION_META_KEY, INDEX_MAPPING_VERSION) + .endObject() + .field("dynamic", "strict") + .startObject("properties") + .startObject("model_id") + .field("type", "keyword") + .endObject() + .startObject("task_type") + .field("type", "keyword") + .endObject() + .startObject("service") + .field("type", "keyword") + .endObject() + .startObject("service_settings") + .field("dynamic", "false") + .startObject("properties") + .endObject() + .endObject() + .startObject("task_settings") + .field("dynamic", "false") + .startObject("properties") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + } catch (IOException e) { + throw new UncheckedIOException("Failed to build mappings for index " + INDEX_NAME, e); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java new file mode 100644 index 0000000000000..3bbc0a53a9973 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.inference.results.InferenceResult; +import org.elasticsearch.xpack.inference.results.SparseEmbeddingResult; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettings; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; + +import java.util.ArrayList; +import java.util.List; + +public class InferenceNamedWriteablesProvider { + + private InferenceNamedWriteablesProvider() {} + + public static List getNamedWriteables() { + List namedWriteables = new ArrayList<>(); + + // ELSER config + namedWriteables.add( + new NamedWriteableRegistry.Entry(ServiceSettings.class, ElserMlNodeServiceSettings.NAME, ElserMlNodeServiceSettings::new) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry(TaskSettings.class, ElserMlNodeTaskSettings.NAME, ElserMlNodeTaskSettings::new) + ); + + // Inference results + namedWriteables.add( + new NamedWriteableRegistry.Entry(InferenceResult.class, SparseEmbeddingResult.NAME, SparseEmbeddingResult::new) + ); + + return namedWriteables; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java new file mode 100644 index 0000000000000..1fee5385cea2a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SystemIndexPlugin; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.inference.action.DeleteInferenceModelAction; +import org.elasticsearch.xpack.inference.action.GetInferenceModelAction; +import org.elasticsearch.xpack.inference.action.InferenceAction; +import 
org.elasticsearch.xpack.inference.action.PutInferenceModelAction; +import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceModelAction; +import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; +import org.elasticsearch.xpack.inference.action.TransportInferenceAction; +import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.registry.ServiceRegistry; +import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceModelAction; +import org.elasticsearch.xpack.inference.rest.RestGetInferenceModelAction; +import org.elasticsearch.xpack.inference.rest.RestInferenceAction; +import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.function.Supplier; + +public class InferencePlugin extends Plugin implements ActionPlugin, SystemIndexPlugin { + + public static final String NAME = "inference"; + + public static final Setting ENCRYPTION_KEY_SETTING = SecureSetting.secureString("xpack.inference.encryption_key", null); + + @Override + public List> getActions() { + return List.of( + new ActionHandler<>(InferenceAction.INSTANCE, TransportInferenceAction.class), + new ActionHandler<>(GetInferenceModelAction.INSTANCE, TransportGetInferenceModelAction.class), + new ActionHandler<>(PutInferenceModelAction.INSTANCE, TransportPutInferenceModelAction.class), + new ActionHandler<>(DeleteInferenceModelAction.INSTANCE, TransportDeleteInferenceModelAction.class) + ); + } + + @Override + public List getNamedWriteables() { + return InferenceNamedWriteablesProvider.getNamedWriteables(); + } + + @Override + public List getNamedXContent() { + return Collections.emptyList(); + } + + @Override + public List getRestHandlers( + 
Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { + return List.of( + new RestInferenceAction(), + new RestGetInferenceModelAction(), + new RestPutInferenceModelAction(), + new RestDeleteInferenceModelAction() + ); + } + + @Override + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver expressionResolver, + Supplier repositoriesServiceSupplier, + Tracer tracer, + AllocationService allocationService, + IndicesService indicesService + ) { + ModelRegistry modelRegistry = new ModelRegistry(client); + ServiceRegistry serviceRegistry = new ServiceRegistry(new ElserMlNodeService(client)); + return List.of(modelRegistry, serviceRegistry); + } + + @Override + public List> getSettings() { + return List.of(ENCRYPTION_KEY_SETTING); + } + + @Override + public Collection getSystemIndexDescriptors(Settings settings) { + return List.of( + SystemIndexDescriptor.builder() + .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED) + .setIndexPattern(InferenceIndex.INDEX_PATTERN) + .setPrimaryIndex(InferenceIndex.INDEX_NAME) + .setDescription("Contains inference service and model configuration") + .setMappings(InferenceIndex.mappings()) + .setSettings(InferenceIndex.settings()) + .setVersionMetaKey("version") + .setOrigin(ClientHelper.INFERENCE_ORIGIN) + .build() + ); + } + + @Override + public String getFeatureName() { + return "inference_plugin"; + } + + @Override + public String getFeatureDescription() { + return "Inference plugin for managing inference services and 
inference"; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/Model.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/Model.java new file mode 100644 index 0000000000000..c0032ebf25ca7 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/Model.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class Model implements ToXContentObject, VersionedNamedWriteable { + + public static final String MODEL_ID = "model_id"; + public static final String SERVICE = "service"; + public static final String SERVICE_SETTINGS = "service_settings"; + public static final String TASK_SETTINGS = "task_settings"; + + private static final String NAME = "inference_model"; + + public static String documentId(String modelId) { + return "model_" + modelId; + } + + private final String modelId; + private final TaskType taskType; + private final String service; + private final ServiceSettings serviceSettings; + private final TaskSettings taskSettings; + + public Model(String modelId, TaskType taskType, String service, ServiceSettings serviceSettings, TaskSettings taskSettings) { + this.modelId = modelId; + this.taskType = taskType; + this.service = service; + this.serviceSettings 
= serviceSettings; + this.taskSettings = taskSettings; + } + + public Model(StreamInput in) throws IOException { + this.modelId = in.readString(); + this.taskType = in.readEnum(TaskType.class); + this.service = in.readString(); + this.serviceSettings = in.readNamedWriteable(ServiceSettings.class); + this.taskSettings = in.readNamedWriteable(TaskSettings.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(modelId); + out.writeEnum(taskType); + out.writeString(service); + out.writeNamedWriteable(serviceSettings); + out.writeNamedWriteable(taskSettings); + } + + public String getModelId() { + return modelId; + } + + public TaskType getTaskType() { + return taskType; + } + + public String getService() { + return service; + } + + public ServiceSettings getServiceSettings() { + return serviceSettings; + } + + public TaskSettings getTaskSettings() { + return taskSettings; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(MODEL_ID, modelId); + builder.field(TaskType.NAME, taskType.toString()); + builder.field(SERVICE, service); + builder.field(SERVICE_SETTINGS, serviceSettings); + builder.field(TASK_SETTINGS, taskSettings); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.V_8_500_074; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Model model = (Model) o; + return Objects.equals(modelId, model.modelId) + && taskType == model.taskType + && Objects.equals(service, model.service) + && Objects.equals(serviceSettings, model.serviceSettings) + && Objects.equals(taskSettings, model.taskSettings); + } + + @Override + public int hashCode() { + return 
Objects.hash(modelId, taskType, service, serviceSettings, taskSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/ServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/ServiceSettings.java new file mode 100644 index 0000000000000..16f39ad8e560c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/ServiceSettings.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.xcontent.ToXContentObject; + +public interface ServiceSettings extends ToXContentObject, VersionedNamedWriteable { + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/TaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/TaskSettings.java new file mode 100644 index 0000000000000..200f1b309822d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/TaskSettings.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.xcontent.ToXContentObject; + +public interface TaskSettings extends ToXContentObject, VersionedNamedWriteable {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/TaskType.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/TaskType.java new file mode 100644 index 0000000000000..5e9cc93270033 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/TaskType.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.Locale; +import java.util.Objects; + +public enum TaskType implements Writeable { + TEXT_EMBEDDING, + SPARSE_EMBEDDING; + + public static String NAME = "task_type"; + + public static TaskType fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } + + public static TaskType fromStringOrStatusException(String name) { + try { + TaskType taskType = TaskType.fromString(name); + return Objects.requireNonNull(taskType); + } catch (IllegalArgumentException e) { + throw new ElasticsearchStatusException("Unknown task_type [{}]", RestStatus.BAD_REQUEST, name); + } + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static TaskType fromStream(StreamInput in) throws 
IOException { + return in.readEnum(TaskType.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeEnum(this); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java new file mode 100644 index 0000000000000..29b4accf1f4f0 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; + +import java.util.Map; + +public record UnparsedModel(String modelId, TaskType taskType, String service, Map settings) { + + public static UnparsedModel unparsedModelFromMap(Map sourceMap) { + String modelId = removeStringOrThrowIfNull(sourceMap, Model.MODEL_ID); + String service = removeStringOrThrowIfNull(sourceMap, Model.SERVICE); + String taskTypeStr = removeStringOrThrowIfNull(sourceMap, TaskType.NAME); + TaskType taskType = TaskType.fromString(taskTypeStr); + + return new UnparsedModel(modelId, taskType, service, sourceMap); + } + + private static String removeStringOrThrowIfNull(Map sourceMap, String fieldName) { + String value = (String) sourceMap.remove(fieldName); + if (value == null) { + throw new ElasticsearchStatusException("Missing required field [{}]", RestStatus.BAD_REQUEST, fieldName); + } + return value; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/DeleteInferenceModelAction.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/DeleteInferenceModelAction.java new file mode 100644 index 0000000000000..2ae1a33f8f5e3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/DeleteInferenceModelAction.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.inference.TaskType; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteInferenceModelAction extends ActionType { + + public static final DeleteInferenceModelAction INSTANCE = new DeleteInferenceModelAction(); + public static final String NAME = "cluster:admin/xpack/inference/delete"; + + public DeleteInferenceModelAction() { + super(NAME, AcknowledgedResponse::readFrom); + } + + public static class Request extends AcknowledgedRequest { + + private final String modelId; + private final TaskType taskType; + + public Request(String modelId, String taskType) { + this.modelId = modelId; + this.taskType = TaskType.fromStringOrStatusException(taskType); + } + + public Request(StreamInput in) throws IOException { + super(in); + this.modelId = in.readString(); + this.taskType = TaskType.fromStream(in); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public String getModelId() { + return 
modelId; + } + + public TaskType getTaskType() { + return taskType; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(modelId); + taskType.writeTo(out); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteInferenceModelAction.Request request = (DeleteInferenceModelAction.Request) o; + return Objects.equals(modelId, request.modelId) && taskType == request.taskType; + } + + @Override + public int hashCode() { + return Objects.hash(modelId, taskType); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/GetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/GetInferenceModelAction.java new file mode 100644 index 0000000000000..a80ed84d8a6ed --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/GetInferenceModelAction.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.inference.TaskType; + +import java.io.IOException; +import java.util.Objects; + +public class GetInferenceModelAction extends ActionType { + + public static final GetInferenceModelAction INSTANCE = new GetInferenceModelAction(); + public static final String NAME = "cluster:admin/xpack/inference/get"; + + public GetInferenceModelAction() { + super(NAME, PutInferenceModelAction.Response::new); + } + + public static class Request extends AcknowledgedRequest { + + private final String modelId; + private final TaskType taskType; + + public Request(String modelId, String taskType) { + this.modelId = modelId; + this.taskType = TaskType.fromStringOrStatusException(taskType); + } + + public Request(StreamInput in) throws IOException { + super(in); + this.modelId = in.readString(); + this.taskType = TaskType.fromStream(in); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public String getModelId() { + return modelId; + } + + public TaskType getTaskType() { + return taskType; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(modelId); + taskType.writeTo(out); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(modelId, request.modelId) && taskType == request.taskType; + } + + @Override + public int hashCode() { + return Objects.hash(modelId, taskType); + } + } +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java new file mode 100644 index 0000000000000..2b95e6153a360 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java @@ -0,0 +1,208 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.inference.TaskType; +import org.elasticsearch.xpack.inference.results.InferenceResult; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class InferenceAction extends ActionType { + + public static final InferenceAction INSTANCE = new InferenceAction(); + public static final String NAME = "cluster:monitor/xpack/inference"; + + public InferenceAction() { + super(NAME, Response::new); + } + + public static class Request extends ActionRequest { + + public static final ParseField INPUT = new ParseField("input"); + public static final 
ParseField TASK_SETTINGS = new ParseField("task_settings"); + + static final ObjectParser PARSER = new ObjectParser<>(NAME, Request.Builder::new); + static { + // TODO timeout + PARSER.declareString(Request.Builder::setInput, INPUT); + PARSER.declareObject(Request.Builder::setTaskSettings, (p, c) -> p.mapOrdered(), TASK_SETTINGS); + } + + public static Request parseRequest(String modelId, String taskType, XContentParser parser) { + Request.Builder builder = PARSER.apply(parser, null); + builder.setModelId(modelId); + builder.setTaskType(taskType); + return builder.build(); + } + + private final TaskType taskType; + private final String modelId; + private final String input; + private final Map taskSettings; + + public Request(TaskType taskType, String modelId, String input, Map taskSettings) { + this.taskType = taskType; + this.modelId = modelId; + this.input = input; + this.taskSettings = taskSettings; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.taskType = TaskType.fromStream(in); + this.modelId = in.readString(); + this.input = in.readString(); + this.taskSettings = in.readMap(); + } + + public TaskType getTaskType() { + return taskType; + } + + public String getModelId() { + return modelId; + } + + public String getInput() { + return input; + } + + public Map getTaskSettings() { + return taskSettings; + } + + @Override + public ActionRequestValidationException validate() { + if (input == null) { + var e = new ActionRequestValidationException(); + e.addValidationError("missing input"); + return e; + } + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + taskType.writeTo(out); + out.writeString(modelId); + out.writeString(input); + out.writeGenericMap(taskSettings); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return taskType == 
request.taskType + && Objects.equals(modelId, request.modelId) + && Objects.equals(input, request.input) + && Objects.equals(taskSettings, request.taskSettings); + } + + @Override + public int hashCode() { + return Objects.hash(taskType, modelId, input, taskSettings); + } + + public static class Builder { + + private TaskType taskType; + private String modelId; + private String input; + private Map taskSettings = Map.of(); + + private Builder() {} + + public Builder setModelId(String modelId) { + this.modelId = Objects.requireNonNull(modelId); + return this; + } + + public Builder setTaskType(String taskTypeStr) { + try { + TaskType taskType = TaskType.fromString(taskTypeStr); + this.taskType = Objects.requireNonNull(taskType); + } catch (IllegalArgumentException e) { + throw new ElasticsearchStatusException("Unknown task_type [{}]", RestStatus.BAD_REQUEST, taskTypeStr); + } + return this; + } + + public Builder setInput(String input) { + this.input = input; + return this; + } + + public Builder setTaskSettings(Map taskSettings) { + this.taskSettings = taskSettings; + return this; + } + + public Request build() { + return new Request(taskType, modelId, input, taskSettings); + } + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final InferenceResult result; + + public Response(InferenceResult result) { + this.result = result; + } + + public Response(StreamInput in) throws IOException { + super(in); + result = in.readNamedWriteable(InferenceResult.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeNamedWriteable(result); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + result.toXContent(builder, params); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) 
return false; + Response response = (Response) o; + return Objects.equals(result, response.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java new file mode 100644 index 0000000000000..6ec020e6b479a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.Model; +import org.elasticsearch.xpack.inference.TaskType; + +import java.io.IOException; +import java.util.Objects; + +public class PutInferenceModelAction extends ActionType { + + public static final PutInferenceModelAction INSTANCE = new PutInferenceModelAction(); + public static final String NAME = "cluster:admin/xpack/inference/put"; + + public PutInferenceModelAction() { + super(NAME, 
PutInferenceModelAction.Response::new); + } + + public static class Request extends AcknowledgedRequest { + + private final TaskType taskType; + private final String modelId; + private final BytesReference content; + private final XContentType contentType; + + public Request(String taskType, String modelId, BytesReference content, XContentType contentType) { + this.taskType = TaskType.fromStringOrStatusException(taskType); + this.modelId = modelId; + this.content = content; + this.contentType = contentType; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.modelId = in.readString(); + this.taskType = TaskType.fromStream(in); + this.content = in.readBytesReference(); + this.contentType = in.readEnum(XContentType.class); + } + + public TaskType getTaskType() { + return taskType; + } + + public String getModelId() { + return modelId; + } + + public BytesReference getContent() { + return content; + } + + public XContentType getContentType() { + return contentType; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(modelId); + taskType.writeTo(out); + out.writeBytesReference(content); + XContentHelper.writeTo(out, contentType); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return taskType == request.taskType + && Objects.equals(modelId, request.modelId) + && Objects.equals(content, request.content) + && contentType == request.contentType; + } + + @Override + public int hashCode() { + return Objects.hash(taskType, modelId, content, contentType); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final Model model; + + public Response(Model model) { + this.model = model; + } + + public 
Response(StreamInput in) throws IOException { + super(in); + model = new Model(in); + } + + public Model getModel() { + return model; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + model.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return model.toXContent(builder, params); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(model, response.model); + } + + @Override + public int hashCode() { + return Objects.hash(model); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java new file mode 100644 index 0000000000000..444159b13dcf2 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; + +public class TransportDeleteInferenceModelAction extends AcknowledgedTransportMasterNodeAction { + + private final ModelRegistry modelRegistry; + + @Inject + public TransportDeleteInferenceModelAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + ModelRegistry modelRegistry + ) { + super( + DeleteInferenceModelAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + DeleteInferenceModelAction.Request::new, + indexNameExpressionResolver, + ThreadPool.Names.SAME + ); + this.modelRegistry = modelRegistry; + } + + @Override + protected void masterOperation( + Task task, + DeleteInferenceModelAction.Request request, + ClusterState state, + ActionListener listener + ) throws Exception { + modelRegistry.deleteModel( + request.getModelId(), + ActionListener.wrap(r -> listener.onResponse(AcknowledgedResponse.TRUE), listener::onFailure) + ); + } + + @Override + protected ClusterBlockException checkBlock(DeleteInferenceModelAction.Request request, 
ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java new file mode 100644 index 0000000000000..f11e6101e1e2a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.inference.UnparsedModel; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.registry.ServiceRegistry; + +public class TransportGetInferenceModelAction extends HandledTransportAction< + GetInferenceModelAction.Request, + PutInferenceModelAction.Response> { + + private final ModelRegistry modelRegistry; + private final ServiceRegistry serviceRegistry; + + @Inject + public TransportGetInferenceModelAction( + TransportService transportService, + ActionFilters actionFilters, + ModelRegistry modelRegistry, + ServiceRegistry serviceRegistry + ) { + super(GetInferenceModelAction.NAME, transportService, actionFilters, 
GetInferenceModelAction.Request::new); + this.modelRegistry = modelRegistry; + this.serviceRegistry = serviceRegistry; + } + + @Override + protected void doExecute( + Task task, + GetInferenceModelAction.Request request, + ActionListener listener + ) { + modelRegistry.getUnparsedModelMap(request.getModelId(), ActionListener.wrap(modelConfigMap -> { + var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config()); + var service = serviceRegistry.getService(unparsedModel.service()); + if (service.isEmpty()) { + listener.onFailure( + new ElasticsearchStatusException( + "Unknown service [{}] for model [{}]. ", + RestStatus.INTERNAL_SERVER_ERROR, + unparsedModel.service(), + unparsedModel.modelId() + ) + ); + return; + } + var model = service.get().parseConfigLenient(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings()); + listener.onResponse(new PutInferenceModelAction.Response(model)); + }, listener::onFailure)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java new file mode 100644 index 0000000000000..42fa61b406e9e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.inference.Model; +import org.elasticsearch.xpack.inference.UnparsedModel; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.registry.ServiceRegistry; +import org.elasticsearch.xpack.inference.services.InferenceService; + +public class TransportInferenceAction extends HandledTransportAction { + + private final ModelRegistry modelRegistry; + private final ServiceRegistry serviceRegistry; + + @Inject + public TransportInferenceAction( + Settings settings, + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + ModelRegistry modelRegistry, + ServiceRegistry serviceRegistry + ) { + super(InferenceAction.NAME, transportService, actionFilters, InferenceAction.Request::new); + this.modelRegistry = modelRegistry; + this.serviceRegistry = serviceRegistry; + } + + @Override + protected void doExecute(Task task, InferenceAction.Request request, ActionListener listener) { + + ActionListener getModelListener = ActionListener.wrap(modelConfigMap -> { + var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config()); + var service = serviceRegistry.getService(unparsedModel.service()); + if (service.isEmpty()) { + listener.onFailure( + new ElasticsearchStatusException( + "Unknown service 
[{}] for model [{}]. ", + RestStatus.INTERNAL_SERVER_ERROR, + unparsedModel.service(), + unparsedModel.modelId() + ) + ); + return; + } + + if (request.getTaskType() != unparsedModel.taskType()) { + listener.onFailure( + new ElasticsearchStatusException( + "Incompatible task_type, the requested type [{}] does not match the model type [{}]", + RestStatus.BAD_REQUEST, + request.getTaskType(), + unparsedModel.taskType() + ) + ); + return; + } + + var model = service.get().parseConfigLenient(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings()); + inferOnService(model, request, service.get(), listener); + }, listener::onFailure); + + modelRegistry.getUnparsedModelMap(request.getModelId(), getModelListener); + } + + private void inferOnService( + Model model, + InferenceAction.Request request, + InferenceService service, + ActionListener listener + ) { + service.infer(model, request.getInput(), request.getTaskSettings(), ActionListener.wrap(inferenceResult -> { + listener.onResponse(new InferenceAction.Response(inferenceResult)); + }, listener::onFailure)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java new file mode 100644 index 0000000000000..0f35523726656 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.inference.Model; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.registry.ServiceRegistry; +import org.elasticsearch.xpack.inference.services.InferenceService; + +import java.io.IOException; +import java.util.Map; + +public class TransportPutInferenceModelAction extends TransportMasterNodeAction< + PutInferenceModelAction.Request, + PutInferenceModelAction.Response> { + + private final ModelRegistry modelRegistry; + private final ServiceRegistry serviceRegistry; + + @Inject + public TransportPutInferenceModelAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + ModelRegistry modelRegistry, + ServiceRegistry serviceRegistry + ) { + super( + PutInferenceModelAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + 
PutInferenceModelAction.Request::new, + indexNameExpressionResolver, + PutInferenceModelAction.Response::new, + ThreadPool.Names.SAME + ); + this.modelRegistry = modelRegistry; + this.serviceRegistry = serviceRegistry; + } + + @Override + protected void masterOperation( + Task task, + PutInferenceModelAction.Request request, + ClusterState state, + ActionListener listener + ) throws Exception { + + var requestAsMap = requestToMap(request); + String serviceName = (String) requestAsMap.remove(Model.SERVICE); + if (serviceName == null) { + listener.onFailure(new ElasticsearchStatusException("Model configuration is missing a service", RestStatus.BAD_REQUEST)); + return; + } + + var service = serviceRegistry.getService(serviceName); + if (service.isEmpty()) { + listener.onFailure(new ElasticsearchStatusException("Unknown service [{}]", RestStatus.BAD_REQUEST, serviceName)); + return; + } + + var model = service.get().parseConfigStrict(request.getModelId(), request.getTaskType(), requestAsMap); + // model is valid good to persist then start + this.modelRegistry.storeModel( + model, + ActionListener.wrap(r -> { startModel(service.get(), model, listener); }, listener::onFailure) + ); + } + + private static void startModel(InferenceService service, Model model, ActionListener listener) { + service.start( + model, + ActionListener.wrap(ok -> listener.onResponse(new PutInferenceModelAction.Response(model)), listener::onFailure) + ); + } + + private Map requestToMap(PutInferenceModelAction.Request request) throws IOException { + try ( + XContentParser parser = XContentHelper.createParser( + XContentParserConfiguration.EMPTY, + request.getContent(), + request.getContentType() + ) + ) { + return parser.map(); + } + } + + @Override + protected ClusterBlockException checkBlock(PutInferenceModelAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java new file mode 100644 index 0000000000000..5ad9554959a27 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.registry; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.DeleteByQueryAction; +import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import 
org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.inference.InferenceIndex; +import org.elasticsearch.xpack.inference.Model; + +import java.io.IOException; +import java.util.Map; + +public class ModelRegistry { + public record ModelConfigMap(Map config) {} + + private final OriginSettingClient client; + + public ModelRegistry(Client client) { + this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); + } + + public void getUnparsedModelMap(String modelId, ActionListener listener) { + ActionListener searchListener = ActionListener.wrap(searchResponse -> { + if (searchResponse.getHits().getHits().length == 0) { + listener.onFailure(new ResourceNotFoundException("Model not found [{}]", modelId)); + return; + } + + var hits = searchResponse.getHits().getHits(); + assert hits.length == 1; + listener.onResponse(new ModelConfigMap(hits[0].getSourceAsMap())); + + }, listener::onFailure); + + QueryBuilder queryBuilder = documentIdQuery(modelId); + SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN).setQuery(queryBuilder).setSize(1).request(); + + client.search(modelSearch, searchListener); + } + + public void storeModel(Model model, ActionListener listener) { + IndexRequest request = createIndexRequest(Model.documentId(model.getModelId()), InferenceIndex.INDEX_NAME, model, false); + request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + client.index(request, ActionListener.wrap(indexResponse -> listener.onResponse(true), e -> { + if (ExceptionsHelper.unwrapCause(e) instanceof VersionConflictEngineException) { + listener.onFailure(new ResourceAlreadyExistsException("Inference model [{}] already exists", model.getModelId())); + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to store inference model [{}]", + RestStatus.INTERNAL_SERVER_ERROR, + e, + model.getModelId() + ) + ); + } + })); + } + + public void deleteModel(String modelId, ActionListener listener) { + 
DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false); + request.indices(InferenceIndex.INDEX_PATTERN); + request.setQuery(documentIdQuery(modelId)); + request.setRefresh(true); + + client.execute( + DeleteByQueryAction.INSTANCE, + request, + ActionListener.wrap(r -> listener.onResponse(Boolean.TRUE), listener::onFailure) + ); + } + + private static IndexRequest createIndexRequest(String docId, String indexName, ToXContentObject body, boolean allowOverwriting) { + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + var request = new IndexRequest(indexName); + XContentBuilder source = body.toXContent(builder, ToXContent.EMPTY_PARAMS); + var operation = allowOverwriting ? DocWriteRequest.OpType.INDEX : DocWriteRequest.OpType.CREATE; + + return request.opType(operation).id(docId).source(source); + } catch (IOException ex) { + throw new ElasticsearchException("Unexpected serialization exception for [" + docId + "]", ex); + } + } + + private QueryBuilder documentIdQuery(String modelId) { + return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(modelId))); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ServiceRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ServiceRegistry.java new file mode 100644 index 0000000000000..8767630f5625b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ServiceRegistry.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.registry; + +import org.elasticsearch.xpack.inference.services.InferenceService; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; + +import java.util.Optional; + +public class ServiceRegistry { + + ElserMlNodeService elserService; + + public ServiceRegistry(ElserMlNodeService elserService) { + this.elserService = elserService; + } + + public Optional getService(String name) { + if (name.equals(ElserMlNodeService.NAME)) { + return Optional.of(elserService); + } + + return Optional.empty(); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java new file mode 100644 index 0000000000000..74050d4b32e89 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rest; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.inference.action.DeleteInferenceModelAction; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.DELETE; + +public class RestDeleteInferenceModelAction extends BaseRestHandler { + @Override + public String getName() { + return "delete_inference_model_action"; + } + + @Override + public List routes() { + return List.of(new Route(DELETE, "_inference/{task_type}/{model_id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + String taskType = restRequest.param("task_type"); + String modelId = restRequest.param("model_id"); + + var request = new DeleteInferenceModelAction.Request(modelId, taskType); + return channel -> client.execute(DeleteInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java new file mode 100644 index 0000000000000..f57c800bd5bdc --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rest; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.inference.action.GetInferenceModelAction; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + +public class RestGetInferenceModelAction extends BaseRestHandler { + @Override + public String getName() { + return "get_inference_model_action"; + } + + @Override + public List routes() { + return List.of(new Route(GET, "_inference/{task_type}/{model_id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String taskType = restRequest.param("task_type"); + String modelId = restRequest.param("model_id"); + + var request = new GetInferenceModelAction.Request(modelId, taskType); + return channel -> client.execute(GetInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java new file mode 100644 index 0000000000000..9d7a0d331b2b3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rest; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.inference.action.InferenceAction; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestInferenceAction extends BaseRestHandler { + @Override + public String getName() { + return "inference_action"; + } + + @Override + public List routes() { + return List.of(new Route(POST, "_inference/{task_type}/{model_id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String taskType = restRequest.param("task_type"); + String modelId = restRequest.param("model_id"); + var request = InferenceAction.Request.parseRequest(modelId, taskType, restRequest.contentParser()); + + return channel -> client.execute(InferenceAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java new file mode 100644 index 0000000000000..cf0eb857feba9 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rest; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.inference.action.PutInferenceModelAction; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestPutInferenceModelAction extends BaseRestHandler { + @Override + public String getName() { + return "put_inference_model_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "_inference/{task_type}/{model_id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String taskType = restRequest.param("task_type"); + String modelId = restRequest.param("model_id"); + + var request = new PutInferenceModelAction.Request(taskType, modelId, restRequest.requiredContent(), restRequest.getXContentType()); + return channel -> client.execute(PutInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/InferenceResult.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/InferenceResult.java new file mode 100644 index 0000000000000..8d8351dbe38d3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/InferenceResult.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.results; + +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.xcontent.ToXContentFragment; + +public interface InferenceResult extends ToXContentFragment, VersionedNamedWriteable {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResult.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResult.java new file mode 100644 index 0000000000000..3f84c91b055a1 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResult.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.results; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public class SparseEmbeddingResult implements InferenceResult { + + public static final String NAME = "sparse_embedding_result"; + + private final List weightedTokens; + + public SparseEmbeddingResult(List weightedTokens) { + this.weightedTokens = weightedTokens; + } + + public SparseEmbeddingResult(StreamInput in) throws IOException { + this.weightedTokens = in.readCollectionAsImmutableList(TextExpansionResults.WeightedToken::new); + } + + public List getWeightedTokens() { + return weightedTokens; + } + + 
@Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("sparse_embedding"); + for (var weightedToken : weightedTokens) { + weightedToken.toXContent(builder, params); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.V_8_500_074; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(weightedTokens); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SparseEmbeddingResult that = (SparseEmbeddingResult) o; + return Objects.equals(weightedTokens, that.weightedTokens); + } + + @Override + public int hashCode() { + return Objects.hash(weightedTokens); + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/InferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/InferenceService.java new file mode 100644 index 0000000000000..18704f4c32740 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/InferenceService.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.inference.Model; +import org.elasticsearch.xpack.inference.TaskType; +import org.elasticsearch.xpack.inference.results.InferenceResult; + +import java.util.Map; + +public interface InferenceService { + + String name(); + + /** + * Parse model configuration from the {@code config map} and return + * the parsed {@link Model}. + * This function modifies {@code config map}, fields are removed + * from the map as they are read. + * + * If the map contains unrecognized configuration option an + * {@code ElasticsearchStatusException} is thrown. + * + * @param modelId Model Id + * @param taskType The model task type + * @param config Configuration options + * @return The parsed {@link Model} + */ + Model parseConfigStrict(String modelId, TaskType taskType, Map config); + + /** + * As {@link #parseConfigStrict(String, TaskType, Map)} but the function + * does not throw on unrecognized options. + * + * @param modelId Model Id + * @param taskType The model task type + * @param config Configuration options + * @return The parsed {@link Model} + */ + Model parseConfigLenient(String modelId, TaskType taskType, Map config); + + /** + * Start or prepare the model for use. + * @param model The model + * @param listener The listener + */ + void start(Model model, ActionListener listener); + + /** + * Perform inference on the model. 
+ * + * @param model Model configuration + * @param input Inference input + * @param requestTaskSettings Settings in the request to override the model's defaults + * @param listener Inference result listener + */ + void infer(Model model, String input, Map requestTaskSettings, ActionListener listener); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java new file mode 100644 index 0000000000000..c2b4986b84dc5 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.core.Strings; +import org.elasticsearch.rest.RestStatus; + +import java.util.Map; + +public class MapParsingUtils { + /** + * Remove the object from the map and cast to the expected type. + * If the object cannot be cast to type an ElasticsearchStatusException + * is thrown. + * + * @param sourceMap Map containing fields + * @param key The key of the object to remove + * @param type The expected type of the removed object + * @return {@code null} if not present else the object cast to type T + * @param The expected type + */ + @SuppressWarnings("unchecked") + public static T removeAsType(Map sourceMap, String key, Class type) { + Object o = sourceMap.remove(key); + if (o == null) { + return null; + } + + if (type.isAssignableFrom(o.getClass())) { + return (T) o; + } else { + throw new ElasticsearchStatusException( + "field [{}] is not of the expected type." 
+ " The value [{}] cannot be converted to a [{}]",
+                RestStatus.BAD_REQUEST,
+                key,
+                o,
+                type.getSimpleName()
+            );
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    public static Map removeFromMapOrThrowIfNull(Map sourceMap, String fieldName) {
+        Map value = (Map) sourceMap.remove(fieldName);
+        if (value == null) {
+            throw new ElasticsearchStatusException("Missing required field [{}]", RestStatus.BAD_REQUEST, fieldName);
+        }
+        return value;
+    }
+
+    public static ElasticsearchStatusException unknownSettingsError(Map config, String serviceName) {
+        // TODO map as JSON
+        return new ElasticsearchStatusException(
+            "Model configuration contains settings [{}] unknown to the [{}] service",
+            RestStatus.BAD_REQUEST,
+            config,
+            serviceName
+        );
+    }
+
+    public static String missingSettingErrorMsg(String settingName, String scope) {
+        return Strings.format("[%s] does not contain the required setting [%s]", scope, settingName);
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java
new file mode 100644
index 0000000000000..499a336c5d1a6
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.services.elser; + +import org.elasticsearch.xpack.inference.Model; +import org.elasticsearch.xpack.inference.TaskType; + +public class ElserMlNodeModel extends Model { + + public ElserMlNodeModel( + String modelId, + TaskType taskType, + String service, + ElserMlNodeServiceSettings serviceSettings, + ElserMlNodeTaskSettings taskSettings + ) { + super(modelId, taskType, service, serviceSettings, taskSettings); + } + + @Override + public ElserMlNodeServiceSettings getServiceSettings() { + return (ElserMlNodeServiceSettings) super.getServiceSettings(); + } + + @Override + public ElserMlNodeTaskSettings getTaskSettings() { + return (ElserMlNodeTaskSettings) super.getTaskSettings(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java new file mode 100644 index 0000000000000..7c542e8acd22b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java @@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elser; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; +import org.elasticsearch.xpack.inference.Model; +import org.elasticsearch.xpack.inference.TaskType; +import org.elasticsearch.xpack.inference.results.InferenceResult; +import org.elasticsearch.xpack.inference.results.SparseEmbeddingResult; +import org.elasticsearch.xpack.inference.services.InferenceService; +import org.elasticsearch.xpack.inference.services.MapParsingUtils; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; +import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull; + +public class ElserMlNodeService implements InferenceService { + + public static final String NAME = "elser_mlnode"; + + private static final String ELSER_V1_MODEL = ".elser_model_1"; + + public static ElserMlNodeModel parseConfig( + boolean throwOnUnknownFields, + String modelId, + TaskType taskType, + Map settings + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(settings, Model.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrThrowIfNull(settings, Model.TASK_SETTINGS); + + var serviceSettings = serviceSettingsFromMap(serviceSettingsMap); + var taskSettings = 
taskSettingsFromMap(taskType, taskSettingsMap);
+
+        if (throwOnUnknownFields) {
+            throwIfNotEmptyMap(settings);
+            throwIfNotEmptyMap(serviceSettingsMap);
+            throwIfNotEmptyMap(taskSettingsMap);
+        }
+
+        return new ElserMlNodeModel(modelId, taskType, NAME, serviceSettings, taskSettings);
+    }
+
+    private final OriginSettingClient client;
+
+    public ElserMlNodeService(Client client) {
+        this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN);
+    }
+
+    @Override
+    public ElserMlNodeModel parseConfigStrict(String modelId, TaskType taskType, Map config) {
+        return parseConfig(true, modelId, taskType, config);
+    }
+
+    @Override
+    public ElserMlNodeModel parseConfigLenient(String modelId, TaskType taskType, Map config) {
+        return parseConfig(false, modelId, taskType, config);
+    }
+
+    @Override
+    public void start(Model model, ActionListener listener) {
+        if (model instanceof ElserMlNodeModel == false) {
+            listener.onFailure(new IllegalStateException("Error starting model, [" + model.getModelId() + "] is not an elser model"));
+            return;
+        }
+
+        if (model.getTaskType() != TaskType.SPARSE_EMBEDDING) {
+            listener.onFailure(new IllegalStateException(unsupportedTaskTypeErrorMsg(model.getTaskType())));
+            return;
+        }
+
+        var elserModel = (ElserMlNodeModel) model;
+        var serviceSettings = elserModel.getServiceSettings();
+
+        var startRequest = new StartTrainedModelDeploymentAction.Request(ELSER_V1_MODEL, model.getModelId());
+        startRequest.setNumberOfAllocations(serviceSettings.getNumAllocations());
+        startRequest.setThreadsPerAllocation(serviceSettings.getNumThreads());
+        startRequest.setWaitForState(STARTED);
+
+        client.execute(
+            StartTrainedModelDeploymentAction.INSTANCE,
+            startRequest,
+            ActionListener.wrap(r -> listener.onResponse(Boolean.TRUE), listener::onFailure)
+        );
+    }
+
+    @Override
+    public void infer(Model model, String input, Map requestTaskSettings, ActionListener listener) {
+        // No task settings to override with requestTaskSettings
+
+        
if (model.getTaskType() != TaskType.SPARSE_EMBEDDING) { + listener.onFailure(new ElasticsearchStatusException(unsupportedTaskTypeErrorMsg(model.getTaskType()), RestStatus.BAD_REQUEST)); + return; + } + + var request = InferTrainedModelDeploymentAction.Request.forTextInput( + model.getModelId(), + TextExpansionConfigUpdate.EMPTY_UPDATE, + List.of(input), + TimeValue.timeValueSeconds(10) // TODO get timeout from request + ); + client.execute(InferTrainedModelDeploymentAction.INSTANCE, request, ActionListener.wrap(inferenceResult -> { + var textExpansionResult = (TextExpansionResults) inferenceResult.getResults().get(0); + var sparseEmbeddingResult = new SparseEmbeddingResult(textExpansionResult.getWeightedTokens()); + listener.onResponse(sparseEmbeddingResult); + }, listener::onFailure)); + } + + private static ElserMlNodeServiceSettings serviceSettingsFromMap(Map config) { + return ElserMlNodeServiceSettings.fromMap(config); + } + + private static ElserMlNodeTaskSettings taskSettingsFromMap(TaskType taskType, Map config) { + if (taskType != TaskType.SPARSE_EMBEDDING) { + throw new ElasticsearchStatusException(unsupportedTaskTypeErrorMsg(taskType), RestStatus.BAD_REQUEST); + } + + // no config options yet + throwIfNotEmptyMap(config); + + return ElserMlNodeTaskSettings.DEFAULT; + } + + @Override + public String name() { + return NAME; + } + + private static void throwIfNotEmptyMap(Map settingsMap) { + if (settingsMap.isEmpty() == false) { + throw MapParsingUtils.unknownSettingsError(settingsMap, NAME); + } + } + + private static String unsupportedTaskTypeErrorMsg(TaskType taskType) { + return "The [" + NAME + "] service does not support task type [" + taskType + "]"; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java new file mode 100644 index 
0000000000000..1d6a5106a1959 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java @@ -0,0 +1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elser; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.Model; +import org.elasticsearch.xpack.inference.ServiceSettings; +import org.elasticsearch.xpack.inference.services.MapParsingUtils; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class ElserMlNodeServiceSettings implements ServiceSettings { + + public static final String NAME = "elser_mlnode_service_settings"; + public static final String NUM_ALLOCATIONS = "num_allocations"; + public static final String NUM_THREADS = "num_threads"; + + private final int numAllocations; + private final int numThreads; + + /** + * Parse the Elser service setting from map and validate the setting values. + * + * If required setting are missing or the values are invalid an + * {@link ValidationException} is thrown. 
+ * + * @param map Source map containg the config + * @return The {@code ElserMlNodeServiceSettings} + */ + public static ElserMlNodeServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + Integer numAllocations = MapParsingUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); + Integer numThreads = MapParsingUtils.removeAsType(map, NUM_THREADS, Integer.class); + + if (numAllocations == null) { + validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, Model.SERVICE_SETTINGS)); + } else if (numAllocations < 1) { + validationException.addValidationError(mustBeAPositiveNumberError(NUM_ALLOCATIONS, numAllocations)); + } + + if (numThreads == null) { + validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(NUM_THREADS, Model.SERVICE_SETTINGS)); + } else if (numThreads < 1) { + validationException.addValidationError(mustBeAPositiveNumberError(NUM_THREADS, numThreads)); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new ElserMlNodeServiceSettings(numAllocations, numThreads); + } + + public ElserMlNodeServiceSettings(int numAllocations, int numThreads) { + this.numAllocations = numAllocations; + this.numThreads = numThreads; + } + + public ElserMlNodeServiceSettings(StreamInput in) throws IOException { + numAllocations = in.readVInt(); + numThreads = in.readVInt(); + } + + public int getNumAllocations() { + return numAllocations; + } + + public int getNumThreads() { + return numThreads; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(NUM_ALLOCATIONS, numAllocations); + builder.field(NUM_THREADS, numThreads); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion 
getMinimalSupportedVersion() { + return TransportVersions.V_8_500_074; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(numAllocations); + out.writeVInt(numThreads); + } + + @Override + public int hashCode() { + return Objects.hash(numAllocations, numThreads); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ElserMlNodeServiceSettings that = (ElserMlNodeServiceSettings) o; + return numAllocations == that.numAllocations && numThreads == that.numThreads; + } + + private static String mustBeAPositiveNumberError(String settingName, int value) { + return "Invalid value [" + value + "]. [" + settingName + "] must be a positive integer"; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java new file mode 100644 index 0000000000000..f4c75683783f0 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elser; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.TaskSettings; + +import java.io.IOException; +import java.util.Objects; + +public class ElserMlNodeTaskSettings implements TaskSettings { + + public static final String NAME = "elser_mlnode_task_settings"; + + public static ElserMlNodeTaskSettings DEFAULT = new ElserMlNodeTaskSettings(); + + public ElserMlNodeTaskSettings() {} + + public ElserMlNodeTaskSettings(StreamInput in) {} + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.V_8_500_074; + } + + @Override + public void writeTo(StreamOutput out) throws IOException {} + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + // TODO Class has no members all instances are equivalent + // Return the hash of NAME to make the serialization tests poss + return Objects.hash(NAME); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java new file mode 100644 index 0000000000000..57f4c0650b932 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference;
+
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettingsTests;
+import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings;
+
+public class ModelTests extends AbstractWireSerializingTestCase {
+
+    public static Model createRandomInstance() {
+        // TODO randomise task types and settings
+        var taskType = TaskType.SPARSE_EMBEDDING;
+        return new Model(randomAlphaOfLength(6), taskType, randomAlphaOfLength(6), randomServiceSettings(), randomTaskSettings(taskType));
+    }
+
+    public static Model mutateTestInstance(Model instance) {
+        return switch (randomIntBetween(0, 2)) {
+            case 0 -> new Model(
+                instance.getModelId() + "foo",
+                instance.getTaskType(),
+                instance.getService(),
+                instance.getServiceSettings(),
+                instance.getTaskSettings()
+            );
+            case 1 -> new Model(
+                instance.getModelId(),
+                TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length],
+                instance.getService(),
+                instance.getServiceSettings(),
+                instance.getTaskSettings()
+            );
+            case 2 -> new Model(
+                instance.getModelId(),
+                instance.getTaskType(),
+                instance.getService() + "bar",
+                instance.getServiceSettings(),
+                instance.getTaskSettings()
+            );
+            default -> throw new IllegalStateException();
+        };
+
+    }
+
+    private static ServiceSettings randomServiceSettings() {
+        return ElserMlNodeServiceSettingsTests.createRandom();
+    }
+
+    private static TaskSettings randomTaskSettings(TaskType taskType) {
+        return ElserMlNodeTaskSettings.DEFAULT; // only 1 implementation
+    }
+
+    @Override
+    
protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(InferenceNamedWriteablesProvider.getNamedWriteables()); + } + + @Override + protected Writeable.Reader instanceReader() { + return Model::new; + } + + @Override + protected Model createTestInstance() { + return createRandomInstance(); + } + + @Override + protected Model mutateInstance(Model instance) { + return mutateTestInstance(instance); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java new file mode 100644 index 0000000000000..22a4981e092dc --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.TaskType; + +public class GetInferenceModelRequestTests extends AbstractWireSerializingTestCase { + + public static GetInferenceModelAction.Request randomTestInstance() { + return new GetInferenceModelAction.Request(randomAlphaOfLength(8), randomFrom(TaskType.values()).toString()); + } + + @Override + protected Writeable.Reader instanceReader() { + return GetInferenceModelAction.Request::new; + } + + @Override + protected GetInferenceModelAction.Request createTestInstance() { + return randomTestInstance(); + } + + @Override + protected GetInferenceModelAction.Request mutateInstance(GetInferenceModelAction.Request instance) { + return switch (randomIntBetween(0, 1)) { + case 0 -> new GetInferenceModelAction.Request(instance.getModelId() + "foo", instance.getTaskType().toString()); + case 1 -> { + var nextTaskType = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; + yield new GetInferenceModelAction.Request(instance.getModelId(), nextTaskType.toString()); + } + default -> throw new UnsupportedOperationException(); + }; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java new file mode 100644 index 0000000000000..f937ba03ae864 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.TaskType; + +import java.io.IOException; +import java.util.HashMap; + +public class InferenceActionRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return InferenceAction.Request::new; + } + + @Override + protected InferenceAction.Request createTestInstance() { + return new InferenceAction.Request( + randomFrom(TaskType.values()), + randomAlphaOfLength(6), + randomAlphaOfLength(8), + randomMap(0, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))) + ); + } + + @Override + protected InferenceAction.Request mutateInstance(InferenceAction.Request instance) throws IOException { + int select = randomIntBetween(0, 3); + return switch (select) { + case 0 -> { + var nextTask = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; + yield new InferenceAction.Request(nextTask, instance.getModelId(), instance.getInput(), instance.getTaskSettings()); + } + case 1 -> new InferenceAction.Request( + instance.getTaskType(), + instance.getModelId() + "foo", + instance.getInput(), + instance.getTaskSettings() + ); + case 2 -> new InferenceAction.Request( + instance.getTaskType(), + instance.getModelId(), + instance.getInput() + "bar", + instance.getTaskSettings() + ); + case 3 -> { + var taskSettings = new HashMap<>(instance.getTaskSettings()); + if (taskSettings.isEmpty()) { + taskSettings.put("foo", "bar"); + } else { + var keyToRemove = taskSettings.keySet().iterator().next(); + taskSettings.remove(keyToRemove); + } + yield new InferenceAction.Request(instance.getTaskType(), instance.getModelId(), instance.getInput(), taskSettings); + } + default -> { + throw new UnsupportedOperationException(); + } + }; + } 
+} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java new file mode 100644 index 0000000000000..13896607fe9ab --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; +import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultTests; + +import java.io.IOException; + +public class InferenceActionResponseTests extends AbstractWireSerializingTestCase { + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(InferenceNamedWriteablesProvider.getNamedWriteables()); + } + + @Override + protected Writeable.Reader instanceReader() { + return InferenceAction.Response::new; + } + + @Override + protected InferenceAction.Response createTestInstance() { + return new InferenceAction.Response(SparseEmbeddingResultTests.createRandomResult()); + } + + @Override + protected InferenceAction.Response mutateInstance(InferenceAction.Response instance) throws IOException { + return null; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java new file mode 100644 index 0000000000000..770faf19585c5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.TaskType; + +public class PutInferenceModelRequestTests extends AbstractWireSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return PutInferenceModelAction.Request::new; + } + + @Override + protected PutInferenceModelAction.Request createTestInstance() { + return new PutInferenceModelAction.Request( + randomFrom(TaskType.values()).toString(), + randomAlphaOfLength(6), + randomBytesReference(50), + randomFrom(XContentType.values()) + ); + } + + @Override + protected PutInferenceModelAction.Request mutateInstance(PutInferenceModelAction.Request instance) { + return switch (randomIntBetween(0, 3)) { + case 0 -> new PutInferenceModelAction.Request( + TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length].toString(), + instance.getModelId(), + instance.getContent(), + instance.getContentType() + ); + case 1 -> new PutInferenceModelAction.Request( + instance.getTaskType().toString(), + instance.getModelId() + "foo", + instance.getContent(), + instance.getContentType() + ); + case 2 -> new PutInferenceModelAction.Request( + instance.getTaskType().toString(), + 
instance.getModelId(), + randomBytesReference(instance.getContent().length() + 1), + instance.getContentType() + ); + case 3 -> new PutInferenceModelAction.Request( + instance.getTaskType().toString(), + instance.getModelId(), + instance.getContent(), + XContentType.values()[(instance.getContentType().ordinal() + 1) % XContentType.values().length] + ); + default -> throw new IllegalStateException(); + }; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java new file mode 100644 index 0000000000000..1e8c05b7b05a8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; +import org.elasticsearch.xpack.inference.ModelTests; + +public class PutInferenceModelResponseTests extends AbstractWireSerializingTestCase { + + @Override + protected PutInferenceModelAction.Response createTestInstance() { + return new PutInferenceModelAction.Response(ModelTests.createRandomInstance()); + } + + @Override + protected PutInferenceModelAction.Response mutateInstance(PutInferenceModelAction.Response instance) { + var mutatedModel = ModelTests.mutateTestInstance(instance.getModel()); + return new PutInferenceModelAction.Response(mutatedModel); + } + + @Override + protected Writeable.Reader instanceReader() { + return PutInferenceModelAction.Response::new; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(InferenceNamedWriteablesProvider.getNamedWriteables()); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ServiceRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ServiceRegistryTests.java new file mode 100644 index 0000000000000..492fb29f910b8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ServiceRegistryTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.registry; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; + +import static org.mockito.Mockito.mock; + +public class ServiceRegistryTests extends ESTestCase { + + public void testGetService() { + ServiceRegistry registry = new ServiceRegistry(mock(ElserMlNodeService.class)); + var service = registry.getService(ElserMlNodeService.NAME); + assertTrue(service.isPresent()); + } + + public void testGetUnknownService() { + ServiceRegistry registry = new ServiceRegistry(mock(ElserMlNodeService.class)); + var service = registry.getService("foo"); + assertFalse(service.isPresent()); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultTests.java new file mode 100644 index 0000000000000..360dc3e97d141 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.results; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; + +import java.util.ArrayList; +import java.util.List; + +public class SparseEmbeddingResultTests extends AbstractWireSerializingTestCase { + + public static SparseEmbeddingResult createRandomResult() { + int numTokens = randomIntBetween(1, 20); + List tokenList = new ArrayList<>(); + for (int i = 0; i < numTokens; i++) { + tokenList.add(new TextExpansionResults.WeightedToken(Integer.toString(i), (float) randomDoubleBetween(0.0, 5.0, false))); + } + return new SparseEmbeddingResult(tokenList); + } + + @Override + protected Writeable.Reader instanceReader() { + return SparseEmbeddingResult::new; + } + + @Override + protected SparseEmbeddingResult createTestInstance() { + return createRandomResult(); + } + + @Override + protected SparseEmbeddingResult mutateInstance(SparseEmbeddingResult instance) { + if (instance.getWeightedTokens().size() > 0) { + var tokens = instance.getWeightedTokens(); + return new SparseEmbeddingResult(tokens.subList(0, tokens.size() - 1)); + } else { + return new SparseEmbeddingResult(List.of(new TextExpansionResults.WeightedToken("a", 1.0f))); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java new file mode 100644 index 0000000000000..7b693b2ef4c0f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.test.ESTestCase; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; + +public class MapParsingUtilsTests extends ESTestCase { + + public void testRemoveAsTypeWithTheCorrectType() { + Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 1.0)); + + Integer i = MapParsingUtils.removeAsType(map, "a", Integer.class); + assertEquals(Integer.valueOf(5), i); + assertNull(map.get("a")); // field has been removed + + String str = MapParsingUtils.removeAsType(map, "b", String.class); + assertEquals("a string", str); + assertNull(map.get("b")); + + Boolean b = MapParsingUtils.removeAsType(map, "c", Boolean.class); + assertEquals(Boolean.TRUE, b); + assertNull(map.get("c")); + + Double d = MapParsingUtils.removeAsType(map, "d", Double.class); + assertEquals(Double.valueOf(1.0), d); + assertNull(map.get("d")); + + assertThat(map.entrySet(), empty()); + } + + public void testRemoveAsTypeWithInCorrectType() { + Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 5.0, "e", 5)); + + var e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "a", String.class)); + assertThat( + e.getMessage(), + containsString("field [a] is not of the expected type. The value [5] cannot be converted to a [String]") + ); + + e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "b", Boolean.class)); + assertThat( + e.getMessage(), + containsString("field [b] is not of the expected type. 
The value [a string] cannot be converted to a [Boolean]") + ); + assertNull(map.get("b")); + + e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "c", Integer.class)); + assertThat( + e.getMessage(), + containsString("field [c] is not of the expected type. The value [true] cannot be converted to a [Integer]") + ); + assertNull(map.get("c")); + + // cannot convert double to integer + e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "d", Integer.class)); + assertThat( + e.getMessage(), + containsString("field [d] is not of the expected type. The value [5.0] cannot be converted to a [Integer]") + ); + assertNull(map.get("d")); + + // cannot convert integer to double + e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "e", Double.class)); + assertThat( + e.getMessage(), + containsString("field [e] is not of the expected type. The value [5] cannot be converted to a [Double]") + ); + assertNull(map.get("d")); + + assertThat(map.entrySet(), empty()); + } + + public void testRemoveAsTypeMissingReturnsNull() { + Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE)); + assertNull(MapParsingUtils.removeAsType(new HashMap<>(), "missing", Integer.class)); + assertThat(map.entrySet(), hasSize(3)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java new file mode 100644 index 0000000000000..5ffc2347b63e6 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elser; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; + +public class ElserMlNodeServiceSettingsTests extends AbstractWireSerializingTestCase { + + public static ElserMlNodeServiceSettings createRandom() { + return new ElserMlNodeServiceSettings(randomIntBetween(1, 4), randomIntBetween(1, 2)); + } + + public void testFromMap() { + var serviceSettings = ElserMlNodeServiceSettings.fromMap( + new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4)) + ); + assertEquals(new ElserMlNodeServiceSettings(1, 4), serviceSettings); + } + + public void testFromMapMissingOptions() { + var e = expectThrows( + ValidationException.class, + () -> ElserMlNodeServiceSettings.fromMap(new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1))) + ); + + assertThat(e.getMessage(), containsString("[service_settings] does not contain the required setting [num_threads]")); + + e = expectThrows( + ValidationException.class, + () -> ElserMlNodeServiceSettings.fromMap(new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_THREADS, 1))) + ); + + assertThat(e.getMessage(), containsString("[service_settings] does not contain the required setting [num_allocations]")); + } + + public void testFromMapInvalidSettings() { + var settingsMap = new HashMap( + Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 0, ElserMlNodeServiceSettings.NUM_THREADS, -1) + ); + var e = expectThrows(ValidationException.class, () -> ElserMlNodeServiceSettings.fromMap(settingsMap)); + + assertThat(e.getMessage(), containsString("Invalid value [0]. 
[num_allocations] must be a positive integer")); + assertThat(e.getMessage(), containsString("Invalid value [-1]. [num_threads] must be a positive integer")); + } + + @Override + protected Writeable.Reader instanceReader() { + return ElserMlNodeServiceSettings::new; + } + + @Override + protected ElserMlNodeServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected ElserMlNodeServiceSettings mutateInstance(ElserMlNodeServiceSettings instance) { + return switch (randomIntBetween(0, 1)) { + case 0 -> new ElserMlNodeServiceSettings(instance.getNumAllocations() + 1, instance.getNumThreads()); + case 1 -> new ElserMlNodeServiceSettings(instance.getNumAllocations(), instance.getNumThreads() + 1); + default -> throw new IllegalStateException(); + }; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java new file mode 100644 index 0000000000000..008e6a8c17653 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elser; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.Model; +import org.elasticsearch.xpack.inference.TaskType; + +public class ElserMlNodeServiceTests extends ESTestCase { + + public static Model randomModelConfig(String modelId, TaskType taskType) { + return switch (taskType) { + case SPARSE_EMBEDDING -> new ElserMlNodeModel( + modelId, + taskType, + ElserMlNodeService.NAME, + ElserMlNodeServiceSettingsTests.createRandom(), + ElserMlNodeTaskSettingsTests.createRandom() + ); + default -> throw new IllegalArgumentException("task type " + taskType + " is not supported"); + }; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettingsTests.java new file mode 100644 index 0000000000000..d55065a5f9b27 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettingsTests.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elser; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +public class ElserMlNodeTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static ElserMlNodeTaskSettings createRandom() { + return ElserMlNodeTaskSettings.DEFAULT; // no options to randomise + } + + @Override + protected Writeable.Reader instanceReader() { + return ElserMlNodeTaskSettings::new; + } + + @Override + protected ElserMlNodeTaskSettings createTestInstance() { + return createRandom(); + } + + @Override + protected ElserMlNodeTaskSettings mutateInstance(ElserMlNodeTaskSettings instance) { + return null; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index d359753992e0c..61c953e7f1c13 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -130,6 +130,9 @@ public class Constants { "cluster:admin/xpack/enrich/get", "cluster:admin/xpack/enrich/put", "cluster:admin/xpack/enrich/reindex", + "cluster:admin/xpack/inference/delete", + "cluster:admin/xpack/inference/get", + "cluster:admin/xpack/inference/put", "cluster:admin/xpack/license/basic_status", // "cluster:admin/xpack/license/delete", "cluster:admin/xpack/license/feature_usage", @@ -318,6 +321,7 @@ public class Constants { "cluster:monitor/xpack/enrich/stats", "cluster:monitor/xpack/eql/stats/dist", "cluster:monitor/xpack/esql/stats/dist", + "cluster:monitor/xpack/inference", "cluster:monitor/xpack/info", 
"cluster:monitor/xpack/info/aggregate_metric", "cluster:monitor/xpack/info/analytics", diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index cd140638eed89..d93ee6ad36c67 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -32,6 +32,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.FLEET_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.IDP_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.INDEX_LIFECYCLE_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.INFERENCE_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.LOGSTASH_MANAGEMENT_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN; @@ -148,6 +149,7 @@ public static void switchUserBasedOnActionOriginAndExecute( case LOGSTASH_MANAGEMENT_ORIGIN: case FLEET_ORIGIN: case ENT_SEARCH_ORIGIN: + case INFERENCE_ORIGIN: case TASKS_ORIGIN: // TODO use a more limited user for tasks securityContext.executeAsInternalUser(InternalUsers.XPACK_USER, version, consumer); break; diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml new file mode 100644 index 0000000000000..af67a099085fd --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml @@ -0,0 +1,41 @@ +--- +"Test get missing model": + - do: + catch: missing + inference.get_model: + task_type: sparse_embedding + model_id: model_to_get + - match: { error.type: "resource_not_found_exception" } + - 
match: { error.reason: "Model not found [model_to_get]" } + +--- +"Test put model with bad task type": + - do: + catch: bad_request + inference.put_model: + task_type: bad + model_id: elser_model + body: > + { + "service": "elser_mlnode", + "service_settings": { + "num_allocations": 1, + "num_threads": 1 + }, + "task_settings": { + } + } + - match: { error.reason: "Unknown task_type [bad]" } + +--- +"Test inference with bad task type": + - do: + catch: bad_request + inference.inference: + task_type: bad + model_id: elser_model + body: > + { + "input": "important text" + } + - match: { error.reason: "Unknown task_type [bad]" } From 3d430444f94db1bf9689fbd80a80e1127c9080b9 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 12 Sep 2023 16:55:24 -0400 Subject: [PATCH 027/114] Provide better error messages from kv processor (#99493) --- .../ingest/common/KeyValueProcessor.java | 43 ++++++++++++++++++- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java index 6b63980b91b5a..8c90beed4d01c 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java @@ -8,6 +8,9 @@ package org.elasticsearch.ingest.common; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; @@ -28,6 +31,8 @@ */ public final class KeyValueProcessor extends AbstractProcessor { + private static final Logger logger = LogManager.getLogger(KeyValueProcessor.class); + public static final String TYPE = "kv"; private static final Pattern STRIP_BRACKETS = 
Pattern.compile("(^[\\(\\[<\"'])|([\\]\\)>\"']$)"); @@ -164,12 +169,40 @@ private static Consumer buildExecution( }; } + /** + * Helper method for buildTrimmer and buildSplitter. + *

+ * If trace logging is enabled, then we should log the stacktrace (and so the message can be slightly simpler). + * On the other hand if trace logging isn't enabled, then we'll need to log some context on the original issue (but not a stacktrace). + *

+ * Regardless of the logging level, we should throw an exception that has the context in its message, which this method builds. + */ + private static ElasticsearchException logAndBuildException(String message, Throwable error) { + String cause = error.getClass().getName(); + if (error.getMessage() != null) { + cause += ": " + error.getMessage(); + } + String longMessage = message + ": " + cause; + if (logger.isTraceEnabled()) { + logger.trace(message, error); + } else { + logger.warn(longMessage); + } + return new ElasticsearchException(longMessage); + } + private static Function buildTrimmer(String trim) { if (trim == null) { return val -> val; } else { Pattern pattern = Pattern.compile("(^([" + trim + "]+))|([" + trim + "]+$)"); - return val -> pattern.matcher(val).replaceAll(""); + return val -> { + try { + return pattern.matcher(val).replaceAll(""); + } catch (Exception | StackOverflowError error) { + throw logAndBuildException("Error trimming [" + val + "] using pattern [" + trim + "]", error); + } + }; } } @@ -177,7 +210,13 @@ private static Function buildSplitter(String split, boolean fi int limit = fields ? 
0 : 2; if (split.length() > 2 || split.length() == 2 && split.charAt(0) != '\\') { Pattern splitPattern = Pattern.compile(split); - return val -> splitPattern.split(val, limit); + return val -> { + try { + return splitPattern.split(val, limit); + } catch (Exception | StackOverflowError error) { + throw logAndBuildException("Error splitting [" + val + "] using pattern [" + split + "]", error); + } + }; } else { return val -> val.split(split, limit); } From 61000ace86db78e78875a0c9709eb155c623b293 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Tue, 12 Sep 2023 16:56:32 -0400 Subject: [PATCH 028/114] Update pull-requests.json --- .buildkite/pull-requests.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json index 466b69e008241..6aed0610caa11 100644 --- a/.buildkite/pull-requests.json +++ b/.buildkite/pull-requests.json @@ -12,7 +12,9 @@ "build_on_commit": true, "build_on_comment": true, "trigger_comment_regex": "buildkite\\W+elasticsearch-ci.+", - "labels": ["buildkite-opt-in"] + "labels": ["buildkite-opt-in"], + "cancel_intermediate_builds": true, + "cancel_intermediate_builds_on_comment": false } ] } From ebe00a61dc15c30cb29b64ce4dd1309ffc6e5013 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 12 Sep 2023 17:07:18 -0700 Subject: [PATCH 029/114] Infer index version for 8.10 patches (#99501) This commit fixes a missed poison pill for inferring index version. The changed logic allows inferring to work on 8.10 patches, not just 8.10.0. 
see #99003 --- .../java/org/elasticsearch/cluster/node/VersionInformation.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/node/VersionInformation.java b/server/src/main/java/org/elasticsearch/cluster/node/VersionInformation.java index 85d17dabed781..a2e2e801db958 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/VersionInformation.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/VersionInformation.java @@ -32,7 +32,7 @@ public static VersionInformation inferVersions(Version nodeVersion) { return null; } else if (nodeVersion.equals(Version.CURRENT)) { return CURRENT; - } else if (nodeVersion.onOrBefore(Version.V_8_10_0)) { + } else if (nodeVersion.before(Version.V_8_11_0)) { return new VersionInformation( nodeVersion, IndexVersion.getMinimumCompatibleIndexVersion(nodeVersion.id), From aff49dc69a489a40f2aa4a2ae8c31b3c4825b2a8 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Wed, 13 Sep 2023 10:38:59 +0200 Subject: [PATCH 030/114] Mute DockerTests.test600Interrupt (#99509) relates #99508 --- .../test/java/org/elasticsearch/packaging/test/DockerTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index a1b71e2175944..d0ff4d2d986e6 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -1222,6 +1222,7 @@ public void test500Readiness() throws Exception { assertTrue(readinessProbe(9399)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99508") public void test600Interrupt() { waitForElasticsearch(installation, "elastic", PASSWORD); final Result containerLogs = getContainerLogs(); From 10c8f2abe6c579958ce9c6fdf429261660dfd2a2 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Wed, 13 Sep 2023 
05:37:24 -0400 Subject: [PATCH 031/114] Remove `x-pack/docs` dir (#99304) **Problem:** With https://github.com/elastic/elasticsearch/pull/99209, the elasticsearch repo's `x-pack/docs` directory no longer contains docs for the Elasticsearch Guide in 8.10+. The `gradle.build` files in the directory were only left in so that the docs build doesn't break. That's been fixed with https://github.com/elastic/docs/pull/2753. **Solution:** Removes the `x-pack/docs` directory. Depends on https://github.com/elastic/elasticsearch/pull/99209, https://github.com/elastic/docs/pull/2753 Closes: https://github.com/elastic/platform-docs-team/issues/208 --- x-pack/docs/build.gradle | 840 ------------------ .../smoketest/XDocsClientYamlTestSuiteIT.java | 155 ---- 2 files changed, 995 deletions(-) delete mode 100644 x-pack/docs/build.gradle delete mode 100644 x-pack/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle deleted file mode 100644 index f3624c2b41894..0000000000000 --- a/x-pack/docs/build.gradle +++ /dev/null @@ -1,840 +0,0 @@ -apply plugin: 'elasticsearch.docs-test' -apply plugin: 'elasticsearch.rest-resources' - -/* List of files that have snippets that probably should be converted to - * `// CONSOLE` and `// TESTRESPONSE` but have yet to be converted. Try and - * only remove entries from this list. When it is empty we'll remove it - * entirely and have a party! There will be cake and everything.... 
*/ -// tasks.named("buildRestTests").configure { -// expectedUnconvertedCandidates = [ -// 'en/rest-api/watcher/put-watch.asciidoc', -// 'en/security/authentication/user-cache.asciidoc', -// 'en/security/authorization/run-as-privilege.asciidoc', -// 'en/security/ccs-clients-integrations/http.asciidoc', -// 'en/rest-api/watcher/stats.asciidoc', -// 'en/watcher/example-watches/watching-time-series-data.asciidoc', -// ] -// } - -dependencies { - yamlRestTestImplementation(testArtifact(project(xpackModule('core')))) - yamlRestTestImplementation(testArtifact(project(':x-pack:plugin'))) -} - -restResources { - restApi { - include '*' - } -} - -testClusters.matching { it.name == "yamlRestTest" }.configureEach { - extraConfigFile 'op-jwks.json', project(':x-pack:test:idp-fixture').file("oidc/op-jwks.json") - extraConfigFile 'idp-docs-metadata.xml', project(':x-pack:test:idp-fixture').file("idp/shibboleth-idp/metadata/idp-docs-metadata.xml") - extraConfigFile 'testClient.crt', project(':x-pack:plugin:security').file("src/test/resources/org/elasticsearch/xpack/security/action/pki_delegation/testClient.crt") - setting 'xpack.security.enabled', 'true' - setting 'xpack.security.authc.api_key.enabled', 'true' - setting 'xpack.security.authc.token.enabled', 'true' - // disable the ILM history for doc tests to avoid potential lingering tasks that'd cause test flakiness - setting 'indices.lifecycle.history_index_enabled', 'false' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.authc.realms.file.file.order', '0' - setting 'xpack.security.authc.realms.native.native.order', '1' - setting 'xpack.security.authc.realms.oidc.oidc1.order', '2' - setting 'xpack.security.authc.realms.oidc.oidc1.op.issuer', 'http://127.0.0.1:8080' - setting 'xpack.security.authc.realms.oidc.oidc1.op.authorization_endpoint', "http://127.0.0.1:8080/c2id-login" - setting 'xpack.security.authc.realms.oidc.oidc1.op.token_endpoint', "http://127.0.0.1:8080/c2id/token" - setting 
'xpack.security.authc.realms.oidc.oidc1.op.jwkset_path', 'op-jwks.json' - setting 'xpack.security.authc.realms.oidc.oidc1.rp.redirect_uri', 'https://my.fantastic.rp/cb' - setting 'xpack.security.authc.realms.oidc.oidc1.rp.client_id', 'elasticsearch-rp' - keystore 'xpack.security.authc.realms.oidc.oidc1.rp.client_secret', 'b07efb7a1cf6ec9462afe7b6d3ab55c6c7880262aa61ac28dded292aca47c9a2' - setting 'xpack.security.authc.realms.oidc.oidc1.rp.response_type', 'id_token' - setting 'xpack.security.authc.realms.oidc.oidc1.claims.principal', 'sub' - setting 'xpack.security.authc.realms.pki.pki1.order', '3' - setting 'xpack.security.authc.realms.pki.pki1.certificate_authorities', '[ "testClient.crt" ]' - setting 'xpack.security.authc.realms.pki.pki1.delegation.enabled', 'true' - setting 'xpack.security.authc.realms.saml.saml1.order', '4' - setting 'xpack.security.authc.realms.saml.saml1.sp.logout', 'https://kibana.org/logout' - setting 'xpack.security.authc.realms.saml.saml1.idp.entity_id', 'https://my-idp.org' - setting 'xpack.security.authc.realms.saml.saml1.idp.metadata.path', 'idp-docs-metadata.xml' - setting 'xpack.security.authc.realms.saml.saml1.sp.entity_id', 'https://kibana.org' - setting 'xpack.security.authc.realms.saml.saml1.sp.acs', 'https://kibana.org/api/security/saml/callback' - setting 'xpack.security.authc.realms.saml.saml1.attributes.principal', 'uid' - setting 'xpack.security.authc.realms.saml.saml1.attributes.name', 'urn:oid:2.5.4.3' - - user username: 'test_admin' - user username: 'test_user' -} - -tasks.named("buildRestTests").configure { buildRestTests -> - - buildRestTests.docs = fileTree(projectDir) { - // No snippets in here! - exclude 'build.gradle' - // That is where the snippets go, not where they come from! - exclude 'build/**' - // These file simply doesn't pass yet. We should figure out how to fix them. 
- exclude 'en/watcher/reference/actions.asciidoc' - exclude 'en/rest-api/security/ssl.asciidoc' - } - - Map setups = buildRestTests.setups - setups['my_inactive_watch'] = ''' - - do: - watcher.put_watch: - id: "my_watch" - active: false - body: > - { - "trigger": { - "schedule": { - "hourly": { - "minute": [ 0, 5 ] - } - } - }, - "input": { - "simple": { - "payload": { - "send": "yes" - } - } - }, - "condition": { - "always": {} - }, - "actions": { - "test_index": { - "index": { - "index": "test" - } - } - } - } - - match: { _id: "my_watch" } -''' - setups['my_active_watch'] = setups['my_inactive_watch'].replace( - 'active: false', 'active: true') - -// Used by SQL because it looks SQL-ish - setups['library'] = ''' - - do: - indices.create: - index: library - body: - settings: - number_of_shards: 1 - number_of_replicas: 1 - mappings: - book: - properties: - name: - type: text - fields: - keyword: - type: keyword - author: - type: text - fields: - keyword: - type: keyword - release_date: - type: date - page_count: - type: short - - do: - bulk: - index: library - type: book - refresh: true - body: | - {"index":{"_id": "Leviathan Wakes"}} - {"name": "Leviathan Wakes", "author": "James S.A. 
Corey", "release_date": "2011-06-02", "page_count": 561} - {"index":{"_id": "Hyperion"}} - {"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} - {"index":{"_id": "Dune"}} - {"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} - {"index":{"_id": "Dune Messiah"}} - {"name": "Dune Messiah", "author": "Frank Herbert", "release_date": "1969-10-15", "page_count": 331} - {"index":{"_id": "Children of Dune"}} - {"name": "Children of Dune", "author": "Frank Herbert", "release_date": "1976-04-21", "page_count": 408} - {"index":{"_id": "God Emperor of Dune"}} - {"name": "God Emperor of Dune", "author": "Frank Herbert", "release_date": "1981-05-28", "page_count": 454} - {"index":{"_id": "Consider Phlebas"}} - {"name": "Consider Phlebas", "author": "Iain M. Banks", "release_date": "1987-04-23", "page_count": 471} - {"index":{"_id": "Pandora's Star"}} - {"name": "Pandora's Star", "author": "Peter F. Hamilton", "release_date": "2004-03-02", "page_count": 768} - {"index":{"_id": "Revelation Space"}} - {"name": "Revelation Space", "author": "Alastair Reynolds", "release_date": "2000-03-15", "page_count": 585} - {"index":{"_id": "A Fire Upon the Deep"}} - {"name": "A Fire Upon the Deep", "author": "Vernor Vinge", "release_date": "1992-06-01", "page_count": 613} - {"index":{"_id": "Ender's Game"}} - {"name": "Ender's Game", "author": "Orson Scott Card", "release_date": "1985-06-01", "page_count": 324} - {"index":{"_id": "1984"}} - {"name": "1984", "author": "George Orwell", "release_date": "1985-06-01", "page_count": 328} - {"index":{"_id": "Fahrenheit 451"}} - {"name": "Fahrenheit 451", "author": "Ray Bradbury", "release_date": "1953-10-15", "page_count": 227} - {"index":{"_id": "Brave New World"}} - {"name": "Brave New World", "author": "Aldous Huxley", "release_date": "1932-06-01", "page_count": 268} - {"index":{"_id": "Foundation"}} - {"name": "Foundation", "author": "Isaac Asimov", "release_date": 
"1951-06-01", "page_count": 224} - {"index":{"_id": "The Giver"}} - {"name": "The Giver", "author": "Lois Lowry", "release_date": "1993-04-26", "page_count": 208} - {"index":{"_id": "Slaughterhouse-Five"}} - {"name": "Slaughterhouse-Five", "author": "Kurt Vonnegut", "release_date": "1969-06-01", "page_count": 275} - {"index":{"_id": "The Hitchhiker's Guide to the Galaxy"}} - {"name": "The Hitchhiker's Guide to the Galaxy", "author": "Douglas Adams", "release_date": "1979-10-12", "page_count": 180} - {"index":{"_id": "Snow Crash"}} - {"name": "Snow Crash", "author": "Neal Stephenson", "release_date": "1992-06-01", "page_count": 470} - {"index":{"_id": "Neuromancer"}} - {"name": "Neuromancer", "author": "William Gibson", "release_date": "1984-07-01", "page_count": 271} - {"index":{"_id": "The Handmaid's Tale"}} - {"name": "The Handmaid's Tale", "author": "Margaret Atwood", "release_date": "1985-06-01", "page_count": 311} - {"index":{"_id": "Starship Troopers"}} - {"name": "Starship Troopers", "author": "Robert A. Heinlein", "release_date": "1959-12-01", "page_count": 335} - {"index":{"_id": "The Left Hand of Darkness"}} - {"name": "The Left Hand of Darkness", "author": "Ursula K. Le Guin", "release_date": "1969-06-01", "page_count": 304} - {"index":{"_id": "The Moon is a Harsh Mistress"}} - {"name": "The Moon is a Harsh Mistress", "author": "Robert A. 
Heinlein", "release_date": "1966-04-01", "page_count": 288} - -''' - setups['sample_job'] = ''' - - do: - ml.put_job: - job_id: "sample_job" - body: > - { - "description" : "Very basic job", - "analysis_config" : { - "bucket_span":"10m", - "detectors" :[ - { - "function": "count" - } - ]}, - "data_description" : { - "time_field":"timestamp", - "time_format": "epoch_ms" - } - } -''' - setups['farequote_index'] = ''' - - do: - indices.create: - index: farequote - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - metric: - properties: - time: - type: date - responsetime: - type: float - airline: - type: keyword - doc_count: - type: integer -''' - setups['farequote_data'] = setups['farequote_index'] + ''' - - do: - bulk: - index: farequote - type: metric - refresh: true - body: | - {"index": {"_id":"1"}} - {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000", "doc_count": 5} - {"index": {"_id":"2"}} - {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000", "doc_count": 23} - {"index": {"_id":"3"}} - {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000", "doc_count": 42} -''' - setups['farequote_job'] = setups['farequote_data'] + ''' - - do: - ml.put_job: - job_id: "farequote" - body: > - { - "analysis_config": { - "bucket_span": "60m", - "detectors": [{ - "function": "mean", - "field_name": "responsetime", - "by_field_name": "airline" - }], - "summary_count_field_name": "doc_count" - }, - "data_description": { - "time_field": "time" - } - } -''' - setups['farequote_datafeed'] = setups['farequote_job'] + ''' - - do: - ml.put_datafeed: - datafeed_id: "datafeed-farequote" - body: > - { - "job_id":"farequote", - "indexes":"farequote" - } -''' - setups['ml_filter_safe_domains'] = ''' - - do: - ml.put_filter: - filter_id: "safe_domains" - body: > - { - "description": "A list of safe domains", - "items": ["*.google.com", "wikipedia.org"] - } -''' - setups['server_metrics_index'] = ''' 
- - do: - indices.create: - index: server-metrics - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - metric: - properties: - timestamp: - type: date - total: - type: long -''' - setups['server_metrics_data'] = setups['server_metrics_index'] + ''' - - do: - bulk: - index: server-metrics - type: metric - refresh: true - body: | - {"index": {"_id":"1177"}} - {"timestamp":"2017-03-23T13:00:00","total":40476} - {"index": {"_id":"1178"}} - {"timestamp":"2017-03-23T13:00:00","total":15287} - {"index": {"_id":"1179"}} - {"timestamp":"2017-03-23T13:00:00","total":-776} - {"index": {"_id":"1180"}} - {"timestamp":"2017-03-23T13:00:00","total":11366} - {"index": {"_id":"1181"}} - {"timestamp":"2017-03-23T13:00:00","total":3606} - {"index": {"_id":"1182"}} - {"timestamp":"2017-03-23T13:00:00","total":19006} - {"index": {"_id":"1183"}} - {"timestamp":"2017-03-23T13:00:00","total":38613} - {"index": {"_id":"1184"}} - {"timestamp":"2017-03-23T13:00:00","total":19516} - {"index": {"_id":"1185"}} - {"timestamp":"2017-03-23T13:00:00","total":-258} - {"index": {"_id":"1186"}} - {"timestamp":"2017-03-23T13:00:00","total":9551} - {"index": {"_id":"1187"}} - {"timestamp":"2017-03-23T13:00:00","total":11217} - {"index": {"_id":"1188"}} - {"timestamp":"2017-03-23T13:00:00","total":22557} - {"index": {"_id":"1189"}} - {"timestamp":"2017-03-23T13:00:00","total":40508} - {"index": {"_id":"1190"}} - {"timestamp":"2017-03-23T13:00:00","total":11887} - {"index": {"_id":"1191"}} - {"timestamp":"2017-03-23T13:00:00","total":31659} -''' - setups['server_metrics_job'] = setups['server_metrics_data'] + ''' - - do: - ml.put_job: - job_id: "total-requests" - body: > - { - "description" : "Total sum of requests", - "analysis_config" : { - "bucket_span":"10m", - "detectors" :[ - { - "detector_description": "Sum of total", - "function": "sum", - "field_name": "total" - } - ]}, - "data_description" : { - "time_field":"timestamp", - "time_format": "epoch_ms" - } - } -''' - 
setups['server_metrics_datafeed'] = setups['server_metrics_job'] + ''' - - do: - ml.put_datafeed: - datafeed_id: "datafeed-total-requests" - body: > - { - "job_id":"total-requests", - "indexes":"server-metrics" - } -''' - setups['server_metrics_openjob'] = setups['server_metrics_datafeed'] + ''' - - do: - ml.open_job: - job_id: "total-requests" -''' - setups['server_metrics_startdf'] = setups['server_metrics_openjob'] + ''' - - do: - ml.start_datafeed: - datafeed_id: "datafeed-total-requests" -''' - setups['calendar_outages'] = ''' - - do: - ml.put_calendar: - calendar_id: "planned-outages" -''' - setups['calendar_outages_addevent'] = setups['calendar_outages'] + ''' - - do: - ml.post_calendar_events: - calendar_id: "planned-outages" - body: > - { "description": "event 1", "start_time": "2017-12-01T00:00:00Z", "end_time": "2017-12-02T00:00:00Z", "calendar_id": "planned-outages" } - - -''' - setups['calendar_outages_openjob'] = setups['server_metrics_openjob'] + ''' - - do: - ml.put_calendar: - calendar_id: "planned-outages" -''' - setups['calendar_outages_addjob'] = setups['server_metrics_openjob'] + ''' - - do: - ml.put_calendar: - calendar_id: "planned-outages" - body: > - { - "job_ids": ["total-requests"] - } -''' - setups['calendar_outages_addevent'] = setups['calendar_outages_addjob'] + ''' - - do: - ml.post_calendar_events: - calendar_id: "planned-outages" - body: > - { "events" : [ - { "description": "event 1", "start_time": "1513641600000", "end_time": "1513728000000"}, - { "description": "event 2", "start_time": "1513814400000", "end_time": "1513900800000"}, - { "description": "event 3", "start_time": "1514160000000", "end_time": "1514246400000"} - ]} -''' - setups['role_mapping'] = ''' - - do: - security.put_role_mapping: - name: "mapping1" - body: > - { - "enabled": true, - "roles": [ "user" ], - "rules": { "field": { "username": "*" } } - } -''' - setups['sensor_rollup_job'] = ''' - - do: - indices.create: - index: sensor-1 - body: - settings: - 
number_of_shards: 1 - number_of_replicas: 0 - mappings: - properties: - timestamp: - type: date - temperature: - type: long - voltage: - type: float - node: - type: keyword - - do: - rollup.put_job: - id: "sensor" - body: > - { - "index_pattern": "sensor-*", - "rollup_index": "sensor_rollup", - "cron": "*/30 * * * * ?", - "page_size" :1000, - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["node"] - } - }, - "metrics": [ - { - "field": "temperature", - "metrics": ["min", "max", "sum"] - }, - { - "field": "voltage", - "metrics": ["avg"] - } - ] - } -''' - setups['sensor_started_rollup_job'] = ''' - - do: - indices.create: - index: sensor-1 - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - properties: - timestamp: - type: date - temperature: - type: long - voltage: - type: float - node: - type: keyword - - - do: - bulk: - index: sensor-1 - refresh: true - body: | - {"index":{}} - {"timestamp": 1516729294000, "temperature": 200, "voltage": 5.2, "node": "a"} - {"index":{}} - {"timestamp": 1516642894000, "temperature": 201, "voltage": 5.8, "node": "b"} - {"index":{}} - {"timestamp": 1516556494000, "temperature": 202, "voltage": 5.1, "node": "a"} - {"index":{}} - {"timestamp": 1516470094000, "temperature": 198, "voltage": 5.6, "node": "b"} - {"index":{}} - {"timestamp": 1516383694000, "temperature": 200, "voltage": 4.2, "node": "c"} - {"index":{}} - {"timestamp": 1516297294000, "temperature": 202, "voltage": 4.0, "node": "c"} - - - do: - rollup.put_job: - id: "sensor" - body: > - { - "index_pattern": "sensor-*", - "rollup_index": "sensor_rollup", - "cron": "* * * * * ?", - "page_size" :1000, - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["node"] - } - }, - "metrics": [ - { - "field": "temperature", - "metrics": ["min", "max", "sum"] - }, - { - "field": "voltage", - "metrics": ["avg"] - 
} - ] - } - - do: - rollup.start_job: - id: "sensor" -''' - - setups['sensor_index'] = ''' - - do: - indices.create: - index: sensor-1 - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - properties: - timestamp: - type: date - temperature: - type: long - voltage: - type: float - node: - type: keyword - load: - type: double - net_in: - type: long - net_out: - type: long - hostname: - type: keyword - datacenter: - type: keyword -''' - - setups['sensor_prefab_data'] = ''' - - do: - indices.create: - index: sensor-1 - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - properties: - timestamp: - type: date - temperature: - type: long - voltage: - type: float - node: - type: keyword - - do: - indices.create: - index: sensor_rollup - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - properties: - node.terms.value: - type: keyword - temperature.sum.value: - type: double - temperature.max.value: - type: double - temperature.min.value: - type: double - timestamp.date_histogram.time_zone: - type: keyword - timestamp.date_histogram.interval: - type: keyword - timestamp.date_histogram.timestamp: - type: date - timestamp.date_histogram._count: - type: long - voltage.avg.value: - type: double - voltage.avg._count: - type: long - _rollup.id: - type: keyword - _rollup.version: - type: long - _meta: - _rollup: - sensor: - cron: "* * * * * ?" 
- rollup_index: "sensor_rollup" - index_pattern: "sensor-*" - timeout: "20s" - page_size: 1000 - groups: - date_histogram: - field: "timestamp" - interval: "7d" - time_zone: "UTC" - terms: - fields: - - "node" - id: sensor - metrics: - - field: "temperature" - metrics: - - min - - max - - sum - - field: "voltage" - metrics: - - avg - - - do: - bulk: - index: sensor_rollup - refresh: true - body: | - {"index":{}} - {"node.terms.value":"b","temperature.sum.value":201.0,"temperature.max.value":201.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":201.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":5.800000190734863,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516640400000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} - {"index":{}} - {"node.terms.value":"c","temperature.sum.value":200.0,"temperature.max.value":200.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":200.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.199999809265137,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516381200000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} - {"index":{}} - 
{"node.terms.value":"a","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":5.099999904632568,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516554000000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} - {"index":{}} - {"node.terms.value":"a","temperature.sum.value":200.0,"temperature.max.value":200.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":200.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":5.199999809265137,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516726800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} - {"index":{}} - {"node.terms.value":"b","temperature.sum.value":198.0,"temperature.max.value":198.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":198.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":5.599999904632568,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516467600000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} - {"index":{}} - 
{"node.terms.value":"c","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.0,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516294800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"} - -''' - setups['admin_role'] = ''' - - do: - security.put_role: - name: "my_admin_role" - body: > - { - "cluster": ["all"], - "indices": [ - {"names": ["index1", "index2" ], "privileges": ["all"], "field_security" : {"grant" : [ "title", "body" ]}} - ], - "run_as": [ "other_user" ], - "metadata" : {"version": 1} - } -''' - setups['jacknich_user'] = ''' - - do: - security.put_user: - username: "jacknich" - body: > - { - "password" : "l0ng-r4nd0m-p@ssw0rd", - "roles" : [ "admin", "other_role1" ], - "full_name" : "Jack Nicholson", - "email" : "jacknich@example.com", - "metadata" : { "intelligence" : 7 } - } - - do: - security.activate_user_profile: - body: > - { - "grant_type": "password", - "username": "jacknich", - "password" : "l0ng-r4nd0m-p@ssw0rd" - } -''' - setups['app0102_privileges'] = ''' - - do: - security.put_privileges: - body: > - { - "myapp": { - "read": { - "application": "myapp", - "name": "read", - "actions": [ - "data:read/*", - "action:login" ], - "metadata": { - "description": "Read access to myapp" - } - } - } - } -''' - setups['service_token42'] = ''' - - do: - security.create_service_token: - namespace: elastic - service: fleet-server - name: token42 -''' - setups['user_profiles'] = ''' - - do: - security.put_user: - username: "jacknich" - body: > - { - "password" : "l0ng-r4nd0m-p@ssw0rd", - "roles" : [ "admin", "other_role1" ], - "full_name" : "Jack Nicholson", - "email" : "jacknich@example.com" - } - - do: - 
security.put_user: - username: "jackrea" - body: > - { - "password" : "l0ng-r4nd0m-p@ssw0rd", - "roles" : [ "admin" ], - "full_name" : "Jack Reacher", - "email" : "jackrea@example.com" - } - - do: - security.put_user: - username: "jackspa" - body: > - { - "password" : "l0ng-r4nd0m-p@ssw0rd", - "roles" : [ "user" ], - "full_name" : "Jack Sparrow", - "email" : "jackspa@example.com" - } - - do: - security.activate_user_profile: - body: > - { - "grant_type": "password", - "username": "jacknich", - "password" : "l0ng-r4nd0m-p@ssw0rd" - } - - do: - security.activate_user_profile: - body: > - { - "grant_type": "password", - "username": "jackrea", - "password" : "l0ng-r4nd0m-p@ssw0rd" - } - - do: - security.activate_user_profile: - body: > - { - "grant_type": "password", - "username": "jackspa", - "password" : "l0ng-r4nd0m-p@ssw0rd" - } - # jacknich - - do: - security.update_user_profile_data: - uid: "u_79HkWkwmnBH5gqFKwoxggWPjEBOur1zLPXQPEl1VBW0_0" - body: > - { - "labels": { - "direction": "north" - }, - "data": { - "app1": { - "key1": "value1" - } - } - } - # jackrea - - do: - security.update_user_profile_data: - uid: "u_P_0BMHgaOK3p7k-PFWUCbw9dQ-UFjt01oWJ_Dp2PmPc_0" - body: > - { - "labels": { - "direction": "west" - } - } - # jackspa - - do: - security.update_user_profile_data: - uid: "u_8RKO7AKfEbSiIHZkZZ2LJy2MUSDPWDr3tMI_CkIGApU_0" - body: > - { - "labels": { - "direction": "south" - } - } -''' -} diff --git a/x-pack/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java b/x-pack/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java deleted file mode 100644 index 2c81a5686becc..0000000000000 --- a/x-pack/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.smoketest; - -import com.carrotsearch.randomizedtesting.annotations.Name; - -import org.apache.http.HttpHost; -import org.elasticsearch.Version; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.test.rest.yaml.ClientYamlDocsTestClient; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ClientYamlTestClient; -import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; -import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; -import org.elasticsearch.xpack.test.rest.AbstractXPackRestTest; -import org.junit.After; - -import java.util.List; -import java.util.Map; - -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; -import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; -import static org.hamcrest.Matchers.is; - -public class XDocsClientYamlTestSuiteIT extends AbstractXPackRestTest { - private static final String USER_TOKEN = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray())); - - public XDocsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @Override - protected void afterIfFailed(List errors) { - super.afterIfFailed(errors); - String name = getTestName().split("=")[1]; - name = name.substring(0, name.length() - 1); - name = name.replaceAll("/([^/]+)$", ".asciidoc:$1"); - logger.error( - "This failing test was generated by documentation starting at {}. It may include many snippets. 
" - + "See Elasticsearch's docs/README.asciidoc for an explanation of test generation.", - name - ); - } - - @Override - protected boolean preserveTemplatesUponCompletion() { - return true; - } - - @Override - protected ClientYamlTestClient initClientYamlTestClient( - final ClientYamlSuiteRestSpec restSpec, - final RestClient restClient, - final List hosts, - final Version esVersion, - final Version masterVersion, - final String os - ) { - return new ClientYamlDocsTestClient( - restSpec, - restClient, - hosts, - esVersion, - masterVersion, - os, - this::getClientBuilderWithSniffedHosts - ); - } - - /** - * All tests run as a an administrative user but use es-shield-runas-user to become a less privileged user. - */ - @Override - protected Settings restClientSettings() { - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", USER_TOKEN).build(); - } - - /** - * Re-enables watcher after every test just in case any test disables it. One does. - */ - @After - public void reenableWatcher() throws Exception { - if (isWatcherTest()) { - assertBusy(() -> { - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); - - switch (state) { - case "stopped": - ClientYamlTestResponse startResponse = getAdminExecutionContext().callApi( - "watcher.start", - emptyMap(), - emptyList(), - emptyMap() - ); - boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged"); - assertThat(isAcknowledged, is(true)); - throw new AssertionError("waiting until stopped state reached started state"); - case "stopping": - throw new AssertionError("waiting until stopping state reached stopped state to start again"); - case "starting": - throw new AssertionError("waiting until starting state reached started state"); - case "started": - // all good here, we are done - break; - default: - throw new AssertionError("unknown state[" + state + 
"]"); - } - }); - } - } - - protected boolean isWatcherTest() { - String testName = getTestName(); - return testName != null && (testName.contains("watcher/") || testName.contains("watcher\\")); - } - - @Override - protected boolean isMachineLearningTest() { - String testName = getTestName(); - return testName != null && (testName.contains("ml/") || testName.contains("ml\\")); - } - - /** - * Deletes users after every test just in case any test adds any. - */ - @After - public void deleteUsers() throws Exception { - ClientYamlTestResponse response = getAdminExecutionContext().callApi("security.get_user", emptyMap(), emptyList(), emptyMap()); - @SuppressWarnings("unchecked") - Map users = (Map) response.getBody(); - for (String user : users.keySet()) { - Map metadataMap = (Map) ((Map) users.get(user)).get("metadata"); - Boolean reserved = metadataMap == null ? null : (Boolean) metadataMap.get("_reserved"); - if (reserved == null || reserved == false) { - logger.warn("Deleting leftover user {}", user); - getAdminExecutionContext().callApi("security.delete_user", singletonMap("username", user), emptyList(), emptyMap()); - } - } - } - - @Override - protected boolean randomizeContentType() { - return false; - } -} From db22afa9c730ec0a32be132eb528f244feffe24a Mon Sep 17 00:00:00 2001 From: dorukguner <38545816+dorukguner@users.noreply.github.com> Date: Wed, 13 Sep 2023 21:02:06 +1000 Subject: [PATCH 032/114] Optimize ContentPath#pathAsText (#98244) This change optimizes calls to ContentPath#pathAsText for cases where the path is still at the root of the parse tree. Also it maintains the already build path when appending several leaf fields for the same parent and by that should be less wasteful than rebuild the whole path string all over again every time. 
Closes #94544 --- docs/changelog/98244.yaml | 6 +++ .../index/mapper/ContentPath.java | 48 ++++++++--------- .../index/mapper/ContentPathTests.java | 51 +++++++++++++++++-- 3 files changed, 74 insertions(+), 31 deletions(-) create mode 100644 docs/changelog/98244.yaml diff --git a/docs/changelog/98244.yaml b/docs/changelog/98244.yaml new file mode 100644 index 0000000000000..e1dde59a83e47 --- /dev/null +++ b/docs/changelog/98244.yaml @@ -0,0 +1,6 @@ +pr: 98244 +summary: Optimize ContentPath#pathAsText +area: Search +type: enhancement +issues: + - 94544 diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ContentPath.java b/server/src/main/java/org/elasticsearch/index/mapper/ContentPath.java index e90e2bbe1afd0..1386028b4ea5b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ContentPath.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ContentPath.java @@ -8,42 +8,35 @@ package org.elasticsearch.index.mapper; +import java.util.Stack; + public final class ContentPath { private static final char DELIMITER = '.'; private final StringBuilder sb; - - private int index = 0; - - private String[] path = new String[10]; - + private final Stack delimiterIndexes; private boolean withinLeafObject = false; public ContentPath() { this.sb = new StringBuilder(); - } - - String[] getPath() { - // used for testing - return path; + this.delimiterIndexes = new Stack<>(); } public void add(String name) { - path[index++] = name; - if (index == path.length) { // expand if needed - expand(); - } - } - - private void expand() { - String[] newPath = new String[path.length + 10]; - System.arraycopy(path, 0, newPath, 0, path.length); - path = newPath; + // Store the location of the previous final delimiter onto the stack, + // which will be the index of the 2nd last delimiter after appending the new name + delimiterIndexes.add(sb.length() - 1); + sb.append(name).append(DELIMITER); } public void remove() { - path[--index] = null; + if 
(delimiterIndexes.isEmpty()) { + throw new IllegalStateException("Content path is empty"); + } + + // Deletes the last node added to the stringbuilder by deleting from the 2nd last delimiter onwards + sb.setLength(delimiterIndexes.pop() + 1); } public void setWithinLeafObject(boolean withinLeafObject) { @@ -55,15 +48,16 @@ public boolean isWithinLeafObject() { } public String pathAsText(String name) { - sb.setLength(0); - for (int i = 0; i < index; i++) { - sb.append(path[i]).append(DELIMITER); + // If length is 0 we know that we are at the root, so return the provided string directly + if (length() == 0) { + return name; } - sb.append(name); - return sb.toString(); + + return sb + name; } public int length() { - return index; + // The amount of delimiters we've added tells us the amount of nodes that have been added to the path + return delimiterIndexes.size(); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ContentPathTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ContentPathTests.java index 829e2fcbe79d6..d66d927e8e0d0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ContentPathTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ContentPathTests.java @@ -23,10 +23,7 @@ public void testAddPath() { public void testRemovePath() { ContentPath contentPath = new ContentPath(); contentPath.add("foo"); - String[] path = contentPath.getPath(); - assertEquals("foo", path[0]); contentPath.remove(); - assertNull(path[0]); assertEquals(0, contentPath.length()); String pathAsText = contentPath.pathAsText("bar"); assertEquals("bar", pathAsText); @@ -36,7 +33,21 @@ public void testRemovePathException() { ContentPath contentPath = new ContentPath(); contentPath.add("foo"); contentPath.remove(); - expectThrows(IndexOutOfBoundsException.class, contentPath::remove); + expectThrows(IllegalStateException.class, contentPath::remove); + } + + public void testRootPath() { + ContentPath contentPath = new 
ContentPath(); + assertEquals("root", contentPath.pathAsText("root")); + assertEquals(0, contentPath.length()); + } + + public void testNestedPath() { + ContentPath contentPath = new ContentPath(); + contentPath.add("root"); + contentPath.add("inner"); + assertEquals("root.inner.leaf1", contentPath.pathAsText("leaf1")); + assertEquals("root.inner.leaf2", contentPath.pathAsText("leaf2")); } public void testBehaviourWithLongerPath() { @@ -82,6 +93,15 @@ public void testPathAsText() { assertEquals("foo.bar.baz", contentPath.pathAsText("baz")); } + public void testPathTextAfterLeafRemoval() { + ContentPath contentPath = new ContentPath(); + contentPath.add("root"); + contentPath.add("inner"); + contentPath.add("leaf"); + contentPath.remove(); + assertEquals("root.inner.newLeaf", contentPath.pathAsText("newLeaf")); + } + public void testPathAsTextAfterRemove() { ContentPath contentPath = new ContentPath(); contentPath.add("foo"); @@ -100,4 +120,27 @@ public void testPathAsTextAfterRemoveAndMoreAdd() { contentPath.add("baz"); assertEquals("foo.baz.qux", contentPath.pathAsText("qux")); } + + public void testPathTextAfterRootRemovalAndNewPathAdded() { + ContentPath contentPath = new ContentPath(); + contentPath.add("root"); + contentPath.add("inner"); + contentPath.add("leaf"); + contentPath.remove(); + contentPath.remove(); + contentPath.remove(); + contentPath.add("newRoot"); + contentPath.add("newInner"); + assertEquals("newRoot.newInner.newLeaf", contentPath.pathAsText("newLeaf")); + } + + public void testPathTextRemovalAfterPathAsTextHasBeenCalled() { + ContentPath contentPath = new ContentPath(); + contentPath.add("root"); + contentPath.add("inner"); + contentPath.pathAsText("leaf"); + contentPath.remove(); + contentPath.add("newInner"); + assertEquals("root.newInner.newLeaf", contentPath.pathAsText("newLeaf")); + } } From af895549cdce9c7021b545c4904cc455bb257a93 Mon Sep 17 00:00:00 2001 From: eyalkoren <41850454+eyalkoren@users.noreply.github.com> Date: Wed, 13 Sep 
2023 14:25:13 +0300 Subject: [PATCH 033/114] Fix composable templates with `subobjects: false` (#97317) --- docs/changelog/97317.yaml | 6 + .../datastreams/DataStreamIT.java | 43 +- .../DataStreamIndexSettingsProvider.java | 5 +- .../15_composition.yml | 143 +++ .../test/tsdb/15_timestamp_mapping.yml | 4 +- .../TransportSimulateIndexTemplateAction.java | 4 +- .../metadata/MetadataCreateIndexService.java | 9 +- .../MetadataIndexTemplateService.java | 16 +- .../common/xcontent/XContentHelper.java | 130 ++- .../index/mapper/MapperErrors.java | 19 + .../index/mapper/MapperService.java | 178 +++- .../index/mapper/MappingParser.java | 39 +- .../index/mapper/NestedObjectMapper.java | 2 +- .../index/mapper/ObjectMapper.java | 14 +- .../MetadataIndexTemplateServiceTests.java | 96 +- .../xcontent/support/XContentHelperTests.java | 167 ++++ .../index/mapper/DocumentMapperTests.java | 4 +- .../index/mapper/DynamicTemplatesTests.java | 14 +- .../index/mapper/MapperServiceTests.java | 820 ++++++++++++++++++ .../index/mapper/ObjectMapperTests.java | 12 +- .../elasticsearch/xpack/ccr/AutoFollowIT.java | 2 +- 21 files changed, 1623 insertions(+), 104 deletions(-) create mode 100644 docs/changelog/97317.yaml create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/MapperErrors.java diff --git a/docs/changelog/97317.yaml b/docs/changelog/97317.yaml new file mode 100644 index 0000000000000..64fcd55e67e28 --- /dev/null +++ b/docs/changelog/97317.yaml @@ -0,0 +1,6 @@ +pr: 97317 +summary: "Fix merges of mappings with `subobjects: false` for composable index templates" +area: Mapping +type: bug +issues: + - 96768 diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 803c63d1836d7..384970bdc7ab9 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ 
b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -514,7 +514,7 @@ public void testTimeStampValidationInvalidFieldMapping() throws Exception { { "properties": { "@timestamp": { - "type": "keyword" + "type": "long" } } }"""; @@ -538,7 +538,7 @@ public void testTimeStampValidationInvalidFieldMapping() throws Exception { ); assertThat( e.getCause().getCause().getMessage(), - equalTo("data stream timestamp field [@timestamp] is of type [keyword], but [date,date_nanos] is expected") + equalTo("data stream timestamp field [@timestamp] is of type [long], but [date,date_nanos] is expected") ); } @@ -1988,18 +1988,13 @@ public void testPartitionedTemplate() throws IOException { ).actionGet(); /** - * routing enable with allow custom routing false + * routing settings with allow custom routing false */ template = new ComposableIndexTemplate( List.of("logs"), new Template( Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), - new CompressedXContent(""" - { - "_routing": { - "required": true - } - }"""), + null, null ), null, @@ -2024,6 +2019,36 @@ public void testPartitionedTemplate() throws IOException { ); } + public void testRoutingEnabledInMappingDisabledInDataStreamTemplate() throws IOException { + ComposableIndexTemplate template = new ComposableIndexTemplate( + List.of("logs"), + new Template( + Settings.builder().put("index.number_of_shards", "3").put("index.routing_partition_size", "2").build(), + new CompressedXContent(""" + { + "_routing": { + "required": true + } + }"""), + null + ), + null, + null, + null, + null, + new ComposableIndexTemplate.DataStreamTemplate(false, false) + ); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> client().execute( + PutComposableIndexTemplateAction.INSTANCE, + new PutComposableIndexTemplateAction.Request("my-it").indexTemplate(template) + ).actionGet() + ); + Exception actualException = (Exception) 
e.getCause(); + assertTrue(Throwables.getRootCause(actualException).getMessage().contains("contradicting `_routing.required` settings")); + } + public void testSearchWithRouting() throws IOException, ExecutionException, InterruptedException { /** * partition size with routing required diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index 064030ed2b6d5..45d4b4df159d8 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -159,10 +159,7 @@ private List findRoutingPaths(String indexName, Settings allSettings, Li tmpIndexMetadata.settings(finalResolvedSettings); // Create MapperService just to extract keyword dimension fields: try (var mapperService = mapperServiceFactory.apply(tmpIndexMetadata.build())) { - for (var mapping : combinedTemplateMappings) { - mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MapperService.MergeReason.INDEX_TEMPLATE); - } - + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, combinedTemplateMappings, MapperService.MergeReason.INDEX_TEMPLATE); List routingPaths = new ArrayList<>(); for (var fieldMapper : mapperService.documentMapper().mappers().fieldMappers()) { extractPath(routingPaths, fieldMapper); diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml index 2aaf492f0ff0d..51c12892c4859 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml 
@@ -349,3 +349,146 @@ index_patterns: ["purple-index"] composed_of: ["red", "blue"] ignore_missing_component_templates: ["foo"] + +--- +"Composable index templates that include subobjects: false at root": + - skip: + version: ' - 8.10.99' + reason: 'https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0' + features: allowed_warnings + + - do: + cluster.put_component_template: + name: test-subobjects + body: + template: + mappings: + subobjects: false + + - do: + cluster.put_component_template: + name: test-field + body: + template: + mappings: + properties: + parent.subfield: + type: keyword + + - do: + allowed_warnings: + - "index template [test-composable-template] has index patterns [test-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-template] will take precedence during new index creation" + indices.put_index_template: + name: test-composable-template + body: + index_patterns: + - test-* + composed_of: + - test-subobjects + - test-field + - is_true: acknowledged + + - do: + indices.create: + index: test-generic + + - do: + indices.get_mapping: + index: test-generic + - match: { test-generic.mappings.properties.parent\.subfield.type: "keyword" } + +--- +"Composable index templates that include subobjects: false on arbitrary field": + - skip: + version: ' - 8.10.99' + reason: 'https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0' + features: allowed_warnings + + - do: + cluster.put_component_template: + name: test-subobjects + body: + template: + mappings: + properties: + parent: + type: object + subobjects: false + + - do: + cluster.put_component_template: + name: test-subfield + body: + template: + mappings: + properties: + parent: + properties: + child.grandchild: + type: keyword + + - do: + allowed_warnings: + - "index template [test-composable-template] has index patterns [test-*] matching patterns from existing older templates [global] with patterns 
(global => [*]); this template [test-composable-template] will take precedence during new index creation" + indices.put_index_template: + name: test-composable-template + body: + index_patterns: + - test-* + composed_of: + - test-subobjects + - test-subfield + - is_true: acknowledged + + - do: + indices.create: + index: test-generic + + - do: + indices.get_mapping: + index: test-generic + - match: { test-generic.mappings.properties.parent.properties.child\.grandchild.type: "keyword" } + +--- +"Composition of component templates with different legal field mappings": + - skip: + features: allowed_warnings + + - do: + cluster.put_component_template: + name: mapping + body: + template: + mappings: + properties: + field: + type: long + coerce: true + + - do: + allowed_warnings: + - "index template [test-composable-template] has index patterns [test-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-template] will take precedence during new index creation" + indices.put_index_template: + name: test-composable-template + body: + index_patterns: + - test-* + composed_of: + - mapping + template: + mappings: + properties: + field: + type: keyword + ignore_above: 1024 + - is_true: acknowledged + + - do: + indices.create: + index: test-generic + + - do: + indices.get_mapping: + index: test-generic + - match: { test-generic.mappings.properties.field.type: "keyword" } + - match: { test-generic.mappings.properties.field.ignore_above: 1024 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml index dbc8076c0a1a8..21b5399b0eaad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml @@ -148,7 +148,7 @@ reject @timestamp with wrong 
type: reason: introduced in 8.1.0 - do: - catch: /data stream timestamp field \[@timestamp\] is of type \[keyword\], but \[date,date_nanos\] is expected/ + catch: /data stream timestamp field \[@timestamp\] is of type \[long\], but \[date,date_nanos\] is expected/ indices.create: index: test body: @@ -163,7 +163,7 @@ reject @timestamp with wrong type: mappings: properties: "@timestamp": - type: keyword + type: long --- reject timestamp meta field with wrong type: diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index c8b7faf698530..d69f7dc8a8607 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -294,9 +294,7 @@ public static Template resolveTemplate( indexMetadata, tempIndexService -> { MapperService mapperService = tempIndexService.mapperService(); - for (CompressedXContent mapping : mappings) { - mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MapperService.MergeReason.INDEX_TEMPLATE); - } + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mappings, MapperService.MergeReason.INDEX_TEMPLATE); DocumentMapper documentMapper = mapperService.documentMapper(); return documentMapper != null ? 
documentMapper.mappingSource() : null; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 763e3d3574752..3302549a1b860 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -1331,13 +1331,14 @@ private static void updateIndexMappingsAndBuildSortOrder( ) throws IOException { MapperService mapperService = indexService.mapperService(); IndexMode indexMode = indexService.getIndexSettings() != null ? indexService.getIndexSettings().getMode() : IndexMode.STANDARD; + List allMappings = new ArrayList<>(); final CompressedXContent defaultMapping = indexMode.getDefaultMapping(); if (defaultMapping != null) { - mapperService.merge(MapperService.SINGLE_MAPPING_NAME, defaultMapping, MergeReason.INDEX_TEMPLATE); - } - for (CompressedXContent mapping : mappings) { - mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MergeReason.INDEX_TEMPLATE); + allMappings.add(defaultMapping); } + allMappings.addAll(mappings); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, allMappings, MergeReason.INDEX_TEMPLATE); + indexMode.validateTimestampFieldMapping(request.dataStreamName() != null, mapperService.mappingLookup()); if (sourceMetadata == null) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index 64a0d56ce1649..5415454a10486 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -84,7 +84,7 @@ public class MetadataIndexTemplateService { public static final String DEFAULT_TIMESTAMP_FIELD = 
"@timestamp"; - public static final CompressedXContent DEFAULT_TIMESTAMP_MAPPING; + public static final CompressedXContent DEFAULT_TIMESTAMP_MAPPING_WITHOUT_ROUTING; private static final CompressedXContent DEFAULT_TIMESTAMP_MAPPING_WITH_ROUTING; @@ -96,8 +96,14 @@ public class MetadataIndexTemplateService { Map.of("type", DateFieldMapper.CONTENT_TYPE, "ignore_malformed", "false") ); try { - DEFAULT_TIMESTAMP_MAPPING = new CompressedXContent( + DEFAULT_TIMESTAMP_MAPPING_WITHOUT_ROUTING = new CompressedXContent( (builder, params) -> builder.startObject(MapperService.SINGLE_MAPPING_NAME) + // adding explicit "_routing": {"required": false}, even though this is the default, because this snippet is used + // later for resolving a RoutingFieldMapper, where we need this information to validate that does not conflict with + // any mapping. + .startObject(RoutingFieldMapper.NAME) + .field("required", false) + .endObject() .field("properties", defaultTimestampField) .endObject() ); @@ -1303,7 +1309,7 @@ public static List collectMappings( if (template.getDataStreamTemplate().isAllowCustomRouting()) { mappings.add(0, DEFAULT_TIMESTAMP_MAPPING_WITH_ROUTING); } else { - mappings.add(0, DEFAULT_TIMESTAMP_MAPPING); + mappings.add(0, DEFAULT_TIMESTAMP_MAPPING_WITHOUT_ROUTING); } } @@ -1599,9 +1605,7 @@ private static void validateCompositeTemplate( List mappings = collectMappings(stateWithIndex, templateName, indexName); try { MapperService mapperService = tempIndexService.mapperService(); - for (CompressedXContent mapping : mappings) { - mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MapperService.MergeReason.INDEX_TEMPLATE); - } + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mappings, MapperService.MergeReason.INDEX_TEMPLATE); if (template.getDataStreamTemplate() != null) { validateTimestampFieldMapping(mapperService.mappingLookup()); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java 
b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index 4ac8f34571624..3bfe5078a3487 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -408,53 +408,110 @@ public static boolean update(Map source, Map cha } /** - * Merges the defaults provided as the second parameter into the content of the first. Only does recursive merge - * for inner maps. + * Merges the defaults provided as the second parameter into the content of the first. Only does recursive merge for inner maps. */ public static void mergeDefaults(Map content, Map defaults) { - for (Map.Entry defaultEntry : defaults.entrySet()) { - if (content.containsKey(defaultEntry.getKey()) == false) { - // copy it over, it does not exists in the content - content.put(defaultEntry.getKey(), defaultEntry.getValue()); + merge(content, defaults, null); + } + + /** + * Merges the map provided as the second parameter into the content of the first. Only does recursive merge for inner maps. + * If a non-null {@link CustomMerge} is provided, it is applied whenever a merge is required, meaning - whenever both the first and + * the second map has values for the same key. Otherwise, values from the first map will always have precedence, meaning - if the + * first map contains a key, its value will not be overriden. + * @param first the map which serves as the merge base + * @param second the map of which contents are merged into the base map + * @param customMerge a custom merge rule to apply whenever a key has concrete values (i.e. not a map or a collection) in both maps + */ + public static void merge(Map first, Map second, @Nullable CustomMerge customMerge) { + merge(null, first, second, customMerge); + } + + /** + * Merges the map provided as the second parameter into the content of the first. Only does recursive merge for inner maps. 
+ * If a non-null {@link CustomMerge} is provided, it is applied whenever a merge is required, meaning - whenever both the first and + * the second map has values for the same key. Otherwise, values from the first map will always have precedence, meaning - if the + * first map contains a key, its value will not be overridden. + * + * @param parent used for recursion to maintain knowledge about the common parent of the currently merged sub-maps, if such exists + * @param first the map which serves as the merge base + * @param second the map of which contents are merged into the base map + * @param customMerge a custom merge rule to apply whenever a key has concrete values (i.e. not a map or a collection) in both maps + */ + public static void merge( + @Nullable String parent, + Map first, + Map second, + @Nullable CustomMerge customMerge + ) { + for (Map.Entry toMergeEntry : second.entrySet()) { + if (first.containsKey(toMergeEntry.getKey()) == false) { + // copy it over, it does not exist in the content + first.put(toMergeEntry.getKey(), toMergeEntry.getValue()); } else { - // in the content and in the default, only merge compound ones (maps) - if (content.get(defaultEntry.getKey()) instanceof Map && defaultEntry.getValue() instanceof Map) { - mergeDefaults((Map) content.get(defaultEntry.getKey()), (Map) defaultEntry.getValue()); - } else if (content.get(defaultEntry.getKey()) instanceof List && defaultEntry.getValue() instanceof List) { - List defaultList = (List) defaultEntry.getValue(); - List contentList = (List) content.get(defaultEntry.getKey()); - - if (allListValuesAreMapsOfOne(defaultList) && allListValuesAreMapsOfOne(contentList)) { + // has values in both maps, merge compound ones (maps) + Object baseValue = first.get(toMergeEntry.getKey()); + if (baseValue instanceof Map && toMergeEntry.getValue() instanceof Map) { + Map mergedValue = null; + if (customMerge != null) { + Object tmp = customMerge.merge(parent, toMergeEntry.getKey(), baseValue, 
toMergeEntry.getValue()); + if (tmp != null && tmp instanceof Map == false) { + throw new IllegalStateException("merging of values for [" + toMergeEntry.getKey() + "] must yield a map"); + } + mergedValue = (Map) tmp; + } + if (mergedValue != null) { + first.put(toMergeEntry.getKey(), mergedValue); + } else { + // if custom merge does not yield a value to be used, continue recursive merge + merge( + toMergeEntry.getKey(), + (Map) baseValue, + (Map) toMergeEntry.getValue(), + customMerge + ); + } + } else if (baseValue instanceof List && toMergeEntry.getValue() instanceof List) { + List listToMerge = (List) toMergeEntry.getValue(); + List baseList = (List) baseValue; + + if (allListValuesAreMapsOfOne(listToMerge) && allListValuesAreMapsOfOne(baseList)) { // all are in the form of [ {"key1" : {}}, {"key2" : {}} ], merge based on keys Map> processed = new LinkedHashMap<>(); - for (Object o : contentList) { + for (Object o : baseList) { Map map = (Map) o; Map.Entry entry = map.entrySet().iterator().next(); processed.put(entry.getKey(), map); } - for (Object o : defaultList) { + for (Object o : listToMerge) { Map map = (Map) o; Map.Entry entry = map.entrySet().iterator().next(); if (processed.containsKey(entry.getKey())) { - mergeDefaults(processed.get(entry.getKey()), map); + merge(toMergeEntry.getKey(), processed.get(entry.getKey()), map, customMerge); } else { - // put the default entries after the content ones. + // append the second list's entries after the first list's entries. 
processed.put(entry.getKey(), map); } } - content.put(defaultEntry.getKey(), new ArrayList<>(processed.values())); + first.put(toMergeEntry.getKey(), new ArrayList<>(processed.values())); } else { - // if both are lists, simply combine them, first the defaults, then the content + // if both are lists, simply combine them, first the second list's values, then the first's // just make sure not to add the same value twice - List mergedList = new ArrayList<>(defaultList); + // custom merge is not applicable here + List mergedList = new ArrayList<>(listToMerge); - for (Object o : contentList) { + for (Object o : baseList) { if (mergedList.contains(o) == false) { mergedList.add(o); } } - content.put(defaultEntry.getKey(), mergedList); + first.put(toMergeEntry.getKey(), mergedList); + } + } else if (customMerge != null) { + Object mergedValue = customMerge.merge(parent, toMergeEntry.getKey(), baseValue, toMergeEntry.getValue()); + if (mergedValue != null) { + first.put(toMergeEntry.getKey(), mergedValue); } } } @@ -473,6 +530,33 @@ private static boolean allListValuesAreMapsOfOne(List list) { return true; } + /** + * A {@code FunctionalInterface} that can be used in order to customize map merges. + */ + @FunctionalInterface + public interface CustomMerge { + /** + * Based on the provided arguments, compute a value to use for the given key as a merge result. + * If this method returns a non-{@code null} value, then the merge result will replace the original value of the provided key in + * the base map. + * If this method returns {@code null}, then: + *
    + *
  • if the values are of map type, the old and new values will be merged recursively + *
  • otherwise, the original value will be maintained + *
+ * This method doesn't throw a checked exception, but it is expected that illegal merges will result in a {@link RuntimeException}. + * @param parent merged field's parent + * @param key merged field's name + * @param oldValue original value of the provided key + * @param newValue the new value of the provided key which is to be merged with the original + * @return the merged value to use for the given key, or {@code null} if there is no custom merge result for it. If {@code null} + * is returned, the algorithm will live the original value as is, unless it is a map, in which case the new map will be merged + * into the old map recursively. + */ + @Nullable + Object merge(String parent, String key, Object oldValue, Object newValue); + } + /** * Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using * {@link XContentBuilder#rawField(String, InputStream)}. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperErrors.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperErrors.java new file mode 100644 index 0000000000000..29b5b18d12b1e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperErrors.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.mapper; + +public class MapperErrors { + static void throwObjectMappingConflictError(String fieldName) throws IllegalArgumentException { + throw new IllegalArgumentException("can't merge a non object mapping [" + fieldName + "] with an object mapping"); + } + + static void throwNestedMappingConflictError(String fieldName) throws IllegalArgumentException { + throw new IllegalArgumentException("can't merge a non-nested mapping [" + fieldName + "] with a nested mapping"); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 512c35a146d0c..9f9e45f53b837 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; @@ -39,10 +40,12 @@ import java.io.IOException; import java.io.InputStream; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; @@ -367,24 +370,162 @@ public void merge(IndexMetadata indexMetadata, MergeReason reason) { } } + /** + * Merging the provided mappings. Actual merging is done in the raw, non-parsed, form of the mappings. This allows to do a proper bulk + * merge, where parsing is done only when all raw mapping settings are already merged. 
+ */ + public DocumentMapper merge(String type, List mappingSources, MergeReason reason) { + final DocumentMapper currentMapper = this.mapper; + if (currentMapper != null && mappingSources.size() == 1 && currentMapper.mappingSource().equals(mappingSources.get(0))) { + return currentMapper; + } + + Map mergedRawMapping = null; + for (CompressedXContent mappingSource : mappingSources) { + Map rawMapping = MappingParser.convertToMap(mappingSource); + + // normalize mappings, making sure that all have the provided type as a single root + if (rawMapping.containsKey(type)) { + if (rawMapping.size() > 1) { + throw new MapperParsingException("cannot merge a map with multiple roots, one of which is [" + type + "]"); + } + } else { + rawMapping = Map.of(type, rawMapping); + } + + if (mergedRawMapping == null) { + mergedRawMapping = rawMapping; + } else { + XContentHelper.merge(type, mergedRawMapping, rawMapping, RawFieldMappingMerge.INSTANCE); + } + } + if (mergedRawMapping != null && mergedRawMapping.size() > 1) { + throw new MapperParsingException("cannot merge mapping sources with different roots"); + } + return (mergedRawMapping != null) ? doMerge(type, reason, mergedRawMapping) : null; + } + + /** + * A {@link org.elasticsearch.common.xcontent.XContentHelper.CustomMerge} for raw map merges that are suitable for index/field mappings. + * The default raw map merge algorithm doesn't override values - if there are multiple values for a key, then: + *
    + *
  • if the values are of map type, the old and new values will be merged recursively + *
  • otherwise, the original value will be maintained + *
+ * When merging field mappings, we want something else. Specifically: + *
    + *
  • within field mappings node (which is nested within a {@code properties} node): + *
      + *
    • if both the base mapping and the mapping to merge into it are of mergeable types (e.g {@code object -> object}, + * {@code object -> nested}), then we only want to merge specific mapping entries: + *
        + *
      • {@code properties} node - merging fields from both mappings + *
      • {@code subobjects} entry - since this setting affects an entire subtree, we need to keep it when merging + *
      + *
    • otherwise, for any couple of non-mergeable types ((e.g {@code object -> long}, {@code long -> long}) - we just want + * to replace the entire mappings subtree, let the last one win + *
    + *
  • any other map values that are not encountered within a {@code properties} node (e.g. "_doc", "_meta" or "properties" + * itself) - apply recursive merge as the default algorithm would apply + *
  • any non-map values - override the value of the base map with the value of the merged map + *
+ */ + private static class RawFieldMappingMerge implements XContentHelper.CustomMerge { + private static final XContentHelper.CustomMerge INSTANCE = new RawFieldMappingMerge(); + + private static final Set MERGEABLE_OBJECT_TYPES = Set.of(ObjectMapper.CONTENT_TYPE, NestedObjectMapper.CONTENT_TYPE); + + private RawFieldMappingMerge() {} + + @SuppressWarnings("unchecked") + @Override + public Object merge(String parent, String key, Object oldValue, Object newValue) { + if (oldValue instanceof Map && newValue instanceof Map) { + if ("properties".equals(parent)) { + // merging two mappings of the same field, where "key" is the field name + Map baseMap = (Map) oldValue; + Map mapToMerge = (Map) newValue; + if (shouldMergeFieldMappings(baseMap, mapToMerge)) { + // if two field mappings are to be merged, we only want to keep some specific entries from the base mapping and + // let all others be overridden by the second mapping + Map mergedMappings = new HashMap<>(); + // we must keep the "properties" node, otherwise our merge has no point + if (baseMap.containsKey("properties")) { + mergedMappings.put("properties", new HashMap<>((Map) baseMap.get("properties"))); + } + // the "subobjects" setting affects an entire subtree and not only locally where it is configured + if (baseMap.containsKey("subobjects")) { + mergedMappings.put("subobjects", baseMap.get("subobjects")); + } + // recursively merge these two field mappings + XContentHelper.merge(key, mergedMappings, mapToMerge, INSTANCE); + return mergedMappings; + } else { + // non-mergeable types - replace the entire mapping subtree for this field + return mapToMerge; + } + } + // anything else (e.g. 
"_doc", "_meta", "properties") - no custom merge, rely on caller merge logic + // field mapping entries of Map type (like "fields" and "meta") are handled above and should never reach here + return null; + } else { + if (key.equals("required")) { + // we look for explicit `_routing.required` settings because we use them to detect contradictions of this setting + // that comes from mappings with such that comes from the optional `data_stream` configuration of composable index + // templates + if ("_routing".equals(parent) && oldValue != newValue) { + throw new MapperParsingException("contradicting `_routing.required` settings"); + } + } + return newValue; + } + } + + /** + * Normally, we don't want to merge raw field mappings, however there are cases where we do, for example - two + * "object" (or "nested") mappings. + * + * @param mappings1 first mapping of a field + * @param mappings2 second mapping of a field + * @return {@code true} if the second mapping should be merged into the first mapping + */ + private boolean shouldMergeFieldMappings(Map mappings1, Map mappings2) { + String type1 = (String) mappings1.get("type"); + if (type1 == null && mappings1.get("properties") != null) { + type1 = ObjectMapper.CONTENT_TYPE; + } + String type2 = (String) mappings2.get("type"); + if (type2 == null && mappings2.get("properties") != null) { + type2 = ObjectMapper.CONTENT_TYPE; + } + if (type1 == null || type2 == null) { + return false; + } + return MERGEABLE_OBJECT_TYPES.contains(type1) && MERGEABLE_OBJECT_TYPES.contains(type2); + } + } + public DocumentMapper merge(String type, CompressedXContent mappingSource, MergeReason reason) { final DocumentMapper currentMapper = this.mapper; if (currentMapper != null && currentMapper.mappingSource().equals(mappingSource)) { return currentMapper; } - synchronized (this) { - Mapping incomingMapping = parseMapping(type, mappingSource); - Mapping mapping = mergeMappings(this.mapper, incomingMapping, reason); - // TODO: In many cases 
the source here is equal to mappingSource so we need not serialize again. - // We should identify these cases reliably and save expensive serialization here - DocumentMapper newMapper = newDocumentMapper(mapping, reason, mapping.toCompressedXContent()); - if (reason == MergeReason.MAPPING_UPDATE_PREFLIGHT) { - return newMapper; - } - this.mapper = newMapper; - assert assertSerialization(newMapper); + Map mappingSourceAsMap = MappingParser.convertToMap(mappingSource); + return doMerge(type, reason, mappingSourceAsMap); + } + + private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map mappingSourceAsMap) { + Mapping incomingMapping = parseMapping(type, mappingSourceAsMap); + Mapping mapping = mergeMappings(this.mapper, incomingMapping, reason); + // TODO: In many cases the source here is equal to mappingSource so we need not serialize again. + // We should identify these cases reliably and save expensive serialization here + DocumentMapper newMapper = newDocumentMapper(mapping, reason, mapping.toCompressedXContent()); + if (reason == MergeReason.MAPPING_UPDATE_PREFLIGHT) { return newMapper; } + this.mapper = newMapper; + assert assertSerialization(newMapper); + return newMapper; } private DocumentMapper newDocumentMapper(Mapping mapping, MergeReason reason, CompressedXContent mappingSource) { @@ -401,6 +542,21 @@ public Mapping parseMapping(String mappingType, CompressedXContent mappingSource } } + /** + * A method to parse mapping from a source in a map form. 
+ * + * @param mappingType the mapping type + * @param mappingSource mapping source already converted to a map form, but not yet processed otherwise + * @return a parsed mapping + */ + public Mapping parseMapping(String mappingType, Map mappingSource) { + try { + return mappingParser.parse(mappingType, mappingSource); + } catch (Exception e) { + throw new MapperParsingException("Failed to parse mapping: {}", e, e.getMessage()); + } + } + public static Mapping mergeMappings(DocumentMapper currentMapper, Mapping incomingMapping, MergeReason reason) { Mapping newMapping; if (currentMapper == null) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java index a310d2bf0b0ae..4cc0a48e939b1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java @@ -73,19 +73,35 @@ private static String getRemainingFields(Map map) { return remainingFields.toString(); } - @SuppressWarnings("unchecked") - Mapping parse(@Nullable String type, CompressedXContent source) throws MapperParsingException { + static Map convertToMap(CompressedXContent source) { Objects.requireNonNull(source, "source cannot be null"); - Map mapping = XContentHelper.convertToMap(source.compressedReference(), true, XContentType.JSON).v2(); - if (mapping.isEmpty()) { + return XContentHelper.convertToMap(source.compressedReference(), true, XContentType.JSON).v2(); + } + + Mapping parse(@Nullable String type, CompressedXContent source) throws MapperParsingException { + Map mapping = convertToMap(source); + return parse(type, mapping); + } + + /** + * A method to parse mapping from a source in a map form. 
+ * + * @param type the mapping type + * @param mappingSource mapping source already converted to a map form, but not yet processed otherwise + * @return a parsed mapping + * @throws MapperParsingException in case of parsing error + */ + @SuppressWarnings("unchecked") + Mapping parse(@Nullable String type, Map mappingSource) throws MapperParsingException { + if (mappingSource.isEmpty()) { if (type == null) { throw new MapperParsingException("malformed mapping, no type name found"); } } else { - String rootName = mapping.keySet().iterator().next(); + String rootName = mappingSource.keySet().iterator().next(); if (type == null || type.equals(rootName) || documentTypeResolver.apply(type).equals(rootName)) { type = rootName; - mapping = (Map) mapping.get(rootName); + mappingSource = (Map) mappingSource.get(rootName); } } if (type == null) { @@ -94,19 +110,16 @@ Mapping parse(@Nullable String type, CompressedXContent source) throws MapperPar if (type.isEmpty()) { throw new MapperParsingException("type cannot be an empty string"); } - return parse(type, mapping); - } - private Mapping parse(String type, Map mapping) throws MapperParsingException { final MappingParserContext mappingParserContext = mappingParserContextSupplier.get(); - RootObjectMapper.Builder rootObjectMapper = RootObjectMapper.parse(type, mapping, mappingParserContext); + RootObjectMapper.Builder rootObjectMapper = RootObjectMapper.parse(type, mappingSource, mappingParserContext); Map, MetadataFieldMapper> metadataMappers = metadataMappersSupplier.get(); Map meta = null; boolean isSourceSynthetic = mappingParserContext.getIndexSettings().getMode().isSyntheticSourceEnabled(); - Iterator> iterator = mapping.entrySet().iterator(); + Iterator> iterator = mappingSource.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry entry = iterator.next(); String fieldName = entry.getKey(); @@ -134,7 +147,7 @@ private Mapping parse(String type, Map mapping) throws MapperPar } @SuppressWarnings("unchecked") - 
Map removed = (Map) mapping.remove("_meta"); + Map removed = (Map) mappingSource.remove("_meta"); if (removed != null) { /* * It may not be required to copy meta here to maintain immutability but the cost is pretty low here. @@ -154,7 +167,7 @@ private Mapping parse(String type, Map mapping) throws MapperPar } if (mappingParserContext.indexVersionCreated().isLegacyIndexVersion() == false) { // legacy indices are allowed to have extra definitions that we ignore (we will drop them on import) - checkNoRemainingFields(mapping, "Root mapping definition has unsupported parameters: "); + checkNoRemainingFields(mappingSource, "Root mapping definition has unsupported parameters: "); } return new Mapping( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index 68f07c1a2cad1..d772df91a3b40 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -201,7 +201,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, MapperBuilderContext parentBuilderContext) { if ((mergeWith instanceof NestedObjectMapper) == false) { - throw new IllegalArgumentException("can't merge a non nested mapping [" + mergeWith.name() + "] with a nested mapping"); + MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } NestedObjectMapper mergeWithObject = (NestedObjectMapper) mergeWith; NestedObjectMapper toMerge = (NestedObjectMapper) clone(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index c59e25e54ae70..c62f010b35af2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -251,7 +251,7 @@ protected static Explicit parseSubobjects(Map node) { if (subobjectsNode != null) { return Explicit.explicitBoolean(XContentMapValues.nodeBooleanValue(subobjectsNode, "subobjects.subobjects")); } - return Explicit.IMPLICIT_TRUE; + return Defaults.SUBOBJECTS; } protected static void parseProperties( @@ -467,11 +467,11 @@ protected MapperBuilderContext createChildContext(MapperBuilderContext mapperBui public ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperBuilderContext parentBuilderContext) { if ((mergeWith instanceof ObjectMapper) == false) { - throw new IllegalArgumentException("can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping"); + MapperErrors.throwObjectMappingConflictError(mergeWith.name()); } if (mergeWith instanceof NestedObjectMapper) { // TODO stop NestedObjectMapper extending ObjectMapper? - throw new IllegalArgumentException("can't merge a nested mapping [" + mergeWith.name() + "] with a non-nested mapping"); + MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } ObjectMapper mergeWithObject = (ObjectMapper) mergeWith; ObjectMapper merged = clone(); @@ -512,10 +512,10 @@ protected void doMerge(final ObjectMapper mergeWith, MergeReason reason, MapperB merged = objectMapper.merge(mergeWithMapper, reason, objectBuilderContext); } else { assert mergeIntoMapper instanceof FieldMapper || mergeIntoMapper instanceof FieldAliasMapper; - if (mergeWithMapper instanceof ObjectMapper) { - throw new IllegalArgumentException( - "can't merge a non object mapping [" + mergeWithMapper.name() + "] with an object mapping" - ); + if (mergeWithMapper instanceof NestedObjectMapper) { + MapperErrors.throwNestedMappingConflictError(mergeWithMapper.name()); + } else if (mergeWithMapper instanceof ObjectMapper) { + MapperErrors.throwObjectMappingConflictError(mergeWithMapper.name()); } // If we're merging template mappings when 
creating an index, then a field definition always diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java index eb99616163502..993146362acad 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateServiceTests.java @@ -1252,7 +1252,12 @@ public void testDefinedTimestampMappingIsAddedForDataStreamTemplates() throws Ex equalTo( Map.of( "_doc", - Map.of("properties", Map.of(DEFAULT_TIMESTAMP_FIELD, Map.of("type", "date", "ignore_malformed", "false"))) + Map.of( + "properties", + Map.of(DEFAULT_TIMESTAMP_FIELD, Map.of("type", "date", "ignore_malformed", "false")), + "_routing", + Map.of("required", false) + ) ) ) ); @@ -1364,7 +1369,12 @@ public void testUserDefinedMappingTakesPrecedenceOverDefault() throws Exception equalTo( Map.of( "_doc", - Map.of("properties", Map.of(DEFAULT_TIMESTAMP_FIELD, Map.of("type", "date", "ignore_malformed", "false"))) + Map.of( + "properties", + Map.of(DEFAULT_TIMESTAMP_FIELD, Map.of("type", "date", "ignore_malformed", "false")), + "_routing", + Map.of("required", false) + ) ) ) ); @@ -1418,7 +1428,12 @@ public void testUserDefinedMappingTakesPrecedenceOverDefault() throws Exception equalTo( Map.of( "_doc", - Map.of("properties", Map.of(DEFAULT_TIMESTAMP_FIELD, Map.of("type", "date", "ignore_malformed", "false"))) + Map.of( + "properties", + Map.of(DEFAULT_TIMESTAMP_FIELD, Map.of("type", "date", "ignore_malformed", "false")), + "_routing", + Map.of("required", false) + ) ) ) ); @@ -1820,6 +1835,7 @@ public void testIndexTemplateFailsToOverrideComponentTemplateMappingField() thro "properties": { "field2": { "type": "object", + "subobjects": false, "properties": { "foo": { "type": "integer" @@ -1842,7 +1858,12 @@ public void 
testIndexTemplateFailsToOverrideComponentTemplateMappingField() thro { "properties": { "field2": { - "type": "text" + "type": "object", + "properties": { + "bar": { + "type": "object" + } + } } } }"""), null), randomBoolean() ? Arrays.asList("c1", "c2") : Arrays.asList("c2", "c1"), 0L, 1L, null, null, null); @@ -1864,7 +1885,7 @@ public void testIndexTemplateFailsToOverrideComponentTemplateMappingField() thro assertNotNull(e.getCause().getCause()); assertThat( e.getCause().getCause().getMessage(), - containsString("can't merge a non object mapping [field2] with an object mapping") + containsString("Tried to add subobject [bar] to object [field2] which does not support subobjects") ); } @@ -1911,6 +1932,7 @@ public void testUpdateComponentTemplateFailsIfResolvedIndexTemplatesWouldBeInval "properties": { "field2": { "type": "object", + "subobjects": false, "properties": { "foo": { "type": "integer" @@ -1949,7 +1971,12 @@ public void testUpdateComponentTemplateFailsIfResolvedIndexTemplatesWouldBeInval { "properties": { "field2": { - "type": "text" + "type": "object", + "properties": { + "bar": { + "type": "object" + } + } } } } @@ -1975,7 +2002,7 @@ public void testUpdateComponentTemplateFailsIfResolvedIndexTemplatesWouldBeInval assertNotNull(e.getCause().getCause().getCause()); assertThat( e.getCause().getCause().getCause().getMessage(), - containsString("can't merge a non object mapping [field2] with an object mapping") + containsString("Tried to add subobject [bar] to object [field2] which does not support subobjects") ); } @@ -2515,6 +2542,61 @@ public void testAddInvalidTemplateIgnoreService() throws Exception { assertThat(e.getMessage(), containsString("missing component templates [fail] that does not exist")); } + public void testComposableTemplateWithSubobjectsFalse() throws Exception { + final MetadataIndexTemplateService service = getMetadataIndexTemplateService(); + ClusterState state = ClusterState.EMPTY_STATE; + + ComponentTemplate subobjects = new 
ComponentTemplate(new Template(null, new CompressedXContent(""" + { + "subobjects": false + } + """), null), null, null); + + ComponentTemplate fieldMapping = new ComponentTemplate(new Template(null, new CompressedXContent(""" + { + "properties": { + "parent.subfield": { + "type": "keyword" + } + } + } + """), null), null, null); + + state = service.addComponentTemplate(state, true, "subobjects", subobjects); + state = service.addComponentTemplate(state, true, "field_mapping", fieldMapping); + ComposableIndexTemplate it = new ComposableIndexTemplate( + List.of("test-*"), + new Template(null, null, null), + List.of("subobjects", "field_mapping"), + 0L, + 1L, + null, + null, + null + ); + state = service.addIndexTemplateV2(state, true, "composable-template", it); + + List mappings = MetadataIndexTemplateService.collectMappings(state, "composable-template", "test-index"); + + assertNotNull(mappings); + assertThat(mappings.size(), equalTo(2)); + List> parsedMappings = mappings.stream().map(m -> { + try { + return MapperService.parseMapping(NamedXContentRegistry.EMPTY, m); + } catch (Exception e) { + logger.error(e); + fail("failed to parse mappings: " + m.string()); + return null; + } + }).toList(); + + assertThat(parsedMappings.get(0), equalTo(Map.of("_doc", Map.of("subobjects", false)))); + assertThat( + parsedMappings.get(1), + equalTo(Map.of("_doc", Map.of("properties", Map.of("parent.subfield", Map.of("type", "keyword"))))) + ); + } + private static List putTemplate(NamedXContentRegistry xContentRegistry, PutRequest request) { ThreadPool testThreadPool = mock(ThreadPool.class); ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java index 7dccae9ec6086..5b50eb63e1489 100644 --- 
a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java @@ -29,6 +29,7 @@ import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class XContentHelperTests extends ESTestCase { @@ -66,6 +67,172 @@ public void testMergingListValuesAreMapsOfOne() { assertThat(content, equalTo(expected)); } + public void testMergingDefaults() { + Map base = getMap("key1", "old", "key3", "old", "map", getMap("key1", "old", "key3", "old")); + Map toMerge = getMap("key2", "new", "key3", "new", "map", getMap("key2", "new", "key3", "new")); + XContentHelper.mergeDefaults(base, toMerge); + Map expected = getMap( + "key1", + "old", + "key2", + "new", + "key3", + "old", + "map", + Map.of("key1", "old", "key2", "new", "key3", "old") + ); + assertThat(base, equalTo(expected)); + } + + public void testMergingWithCustomMerge() { + Map base = getMap("key1", "old", "key3", "old", "key4", "old"); + Map toMerge = getMap("key2", "new", "key3", "new", "key4", "new"); + XContentHelper.merge(base, toMerge, (parent, key, oldValue, newValue) -> "key3".equals(key) ? newValue : oldValue); + Map expected = getMap("key1", "old", "key2", "new", "key3", "new", "key4", "old"); + assertThat(base, equalTo(expected)); + } + + public void testMergingWithCustomMapReplacement() { + Map base = getMap( + "key1", + "old", + "key3", + "old", + "key4", + "old", + "map", + Map.of("key1", "old", "key3", "old", "key4", "old") + ); + Map toMerge = getMap( + "key2", + "new", + "key3", + "new", + "key4", + "new", + "map", + Map.of("key2", "new", "key3", "new", "key4", "new") + ); + XContentHelper.merge( + base, + toMerge, + (parent, key, oldValue, newValue) -> "key3".equals(key) || "map".equals(key) ? 
newValue : oldValue + ); + Map expected = getMap( + "key1", + "old", + "key2", + "new", + "key3", + "new", + "key4", + "old", + "map", + Map.of("key2", "new", "key3", "new", "key4", "new") + ); + assertThat(base, equalTo(expected)); + } + + public void testMergingWithCustomMapMerge() { + Map base = getMap( + "key1", + "old", + "key3", + "old", + "key4", + "old", + "map", + new HashMap<>(Map.of("key1", "old", "key3", "old", "key4", "old")) + ); + Map toMerge = getMap( + "key2", + "new", + "key3", + "new", + "key4", + "new", + "map", + Map.of("key2", "new", "key3", "new", "key4", "new") + ); + XContentHelper.merge(base, toMerge, (parent, key, oldValue, newValue) -> "key3".equals(key) ? oldValue : null); + Map expected = getMap( + "key1", + "old", + "key2", + "new", + "key3", + "old", + "key4", + "old", + "map", + Map.of("key1", "old", "key2", "new", "key3", "old", "key4", "old") + ); + assertThat(base, equalTo(expected)); + } + + public void testMergingListValueWithCustomMapReplacement() { + Map base = getMap( + "key", + List.of("value1", "value3", "value4"), + "list", + List.of(new HashMap<>(Map.of("map", new HashMap<>(Map.of("key1", "old", "key3", "old", "key4", "old"))))) + ); + Map toMerge = getMap( + "key", + List.of("value1", "value2", "value4"), + "list", + List.of(Map.of("map", Map.of("key2", "new", "key3", "new", "key4", "new"))) + ); + XContentHelper.merge( + base, + toMerge, + (parent, key, oldValue, newValue) -> "key3".equals(key) || "map".equals(key) ? 
newValue : oldValue + ); + Map expected = getMap( + "key", + List.of("value1", "value2", "value4", "value3"), + "list", + List.of(Map.of("map", Map.of("key2", "new", "key3", "new", "key4", "new"))) + ); + assertThat(base, equalTo(expected)); + } + + public void testMergingListValueWithCustomMapMerge() { + Map base = getMap( + "key", + List.of("value1", "value3", "value4"), + "list", + List.of(new HashMap<>(Map.of("map", new HashMap<>(Map.of("key1", "old", "key3", "old", "key4", "old"))))) + ); + Map toMerge = getMap( + "key", + List.of("value1", "value2", "value4"), + "list", + List.of(Map.of("map", Map.of("key2", "new", "key3", "new", "key4", "new"))) + ); + XContentHelper.merge(base, toMerge, (parent, key, oldValue, newValue) -> "key3".equals(key) ? newValue : null); + Map expected = getMap( + "key", + List.of("value1", "value2", "value4", "value3"), + "list", + List.of(Map.of("map", Map.of("key1", "old", "key2", "new", "key3", "new", "key4", "old"))) + ); + assertThat(base, equalTo(expected)); + } + + public void testMergingWithCustomMergeWithException() { + final Map base = getMap("key1", "old", "key3", "old", "key4", "old"); + final Map toMerge = getMap("key2", "new", "key3", "new", "key4", "new"); + final XContentHelper.CustomMerge customMerge = (parent, key, oldValue, newValue) -> { + if ("key3".equals(key)) { + throw new IllegalArgumentException(key + " is not allowed"); + } + return oldValue; + }; + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> XContentHelper.merge(base, toMerge, customMerge)); + assertThat(e.getMessage(), containsString("key3 is not allowed")); + } + public void testToXContent() throws IOException { final XContentType xContentType = randomFrom(XContentType.values()); final ToXContent toXContent; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java index a86a417a37632..f0458add93c78 100644 
--- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java @@ -95,14 +95,14 @@ public void testMergeObjectAndNested() throws Exception { IllegalArgumentException.class, () -> MapperService.mergeMappings(objectMapper, nestedMapper.mapping(), reason) ); - assertThat(e.getMessage(), containsString("can't merge a nested mapping [obj] with a non-nested mapping")); + assertThat(e.getMessage(), containsString("can't merge a non-nested mapping [obj] with a nested mapping")); } { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> MapperService.mergeMappings(nestedMapper, objectMapper.mapping(), reason) ); - assertThat(e.getMessage(), containsString("can't merge a non nested mapping [obj] with a nested mapping")); + assertThat(e.getMessage(), containsString("can't merge a non-nested mapping [obj] with a nested mapping")); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 6c17c97067612..6c2a02df1db24 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -310,7 +310,7 @@ public void testDynamicTemplates() throws Exception { } public void testDynamicTemplatesForIndexTemplate() throws IOException { - String mapping = Strings.toString( + String mapping1 = Strings.toString( XContentFactory.jsonBuilder() .startObject() .startArray("dynamic_templates") @@ -333,11 +333,9 @@ public void testDynamicTemplatesForIndexTemplate() throws IOException { .endArray() .endObject() ); - MapperService mapperService = createMapperService(IndexVersion.current(), Settings.EMPTY, () -> true); - mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), 
MapperService.MergeReason.INDEX_TEMPLATE); // There should be no update if templates are not set. - mapping = Strings.toString( + String mapping2 = Strings.toString( XContentFactory.jsonBuilder() .startObject() .startObject("properties") @@ -347,9 +345,11 @@ public void testDynamicTemplatesForIndexTemplate() throws IOException { .endObject() .endObject() ); + + MapperService mapperService = createMapperService(IndexVersion.current(), Settings.EMPTY, () -> true); DocumentMapper mapper = mapperService.merge( MapperService.SINGLE_MAPPING_NAME, - new CompressedXContent(mapping), + List.of(new CompressedXContent(mapping1), new CompressedXContent(mapping2)), MapperService.MergeReason.INDEX_TEMPLATE ); @@ -361,7 +361,7 @@ public void testDynamicTemplatesForIndexTemplate() throws IOException { assertEquals("second", templates[1].pathMatch().get(0)); // Dynamic templates should be appended and deduplicated. - mapping = Strings.toString( + String mapping3 = Strings.toString( XContentFactory.jsonBuilder() .startObject() .startArray("dynamic_templates") @@ -386,7 +386,7 @@ public void testDynamicTemplatesForIndexTemplate() throws IOException { ); mapper = mapperService.merge( MapperService.SINGLE_MAPPING_NAME, - new CompressedXContent(mapping), + new CompressedXContent(mapping3), MapperService.MergeReason.INDEX_TEMPLATE ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 32df73fea9d30..69b44d383193a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -27,6 +27,7 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static 
org.hamcrest.Matchers.nullValue; @@ -416,4 +417,823 @@ to get the wrong path (missing the first portion). List fields = parsedDocument.rootDoc().getFields("obj.sub.string"); assertEquals(1, fields.size()); } + + public void testBulkMerge() throws IOException { + final MapperService mapperService = createMapperService(mapping(b -> {})); + CompressedXContent mapping1 = createTestMapping1(); + CompressedXContent mapping2 = createTestMapping2(); + mapperService.merge("_doc", mapping1, MergeReason.INDEX_TEMPLATE); + DocumentMapper sequentiallyMergedMapper = mapperService.merge("_doc", mapping2, MergeReason.INDEX_TEMPLATE); + DocumentMapper bulkMergedMapper = mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + assertEquals(sequentiallyMergedMapper.mappingSource(), bulkMergedMapper.mappingSource()); + } + + public void testMergeSubobjectsFalseOrder() throws IOException { + final MapperService mapperService = createMapperService(mapping(b -> {})); + CompressedXContent mapping1 = createTestMapping1(); + CompressedXContent mapping2 = createTestMapping2(); + DocumentMapper subobjectsFirst = mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + DocumentMapper subobjectsLast = mapperService.merge("_doc", List.of(mapping2, mapping1), MergeReason.INDEX_TEMPLATE); + assertEquals(subobjectsFirst.mappingSource(), subobjectsLast.mappingSource()); + } + + private static CompressedXContent createTestMapping1() throws IOException { + CompressedXContent mapping1; + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) { + mapping1 = new CompressedXContent( + BytesReference.bytes( + xContentBuilder.startObject() + .startObject("_doc") + .field("subobjects", false) + .startObject("properties") + .startObject("parent") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + } + return mapping1; + } + + private static CompressedXContent createTestMapping2() throws 
IOException { + CompressedXContent mapping2; + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) { + mapping2 = new CompressedXContent( + BytesReference.bytes( + xContentBuilder.startObject() + .startObject("_doc") + .field("subobjects", false) + .startObject("properties") + .startObject("parent.subfield") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + } + return mapping2; + } + + public void testSubobjectsDisabledNotAtRoot() throws IOException { + final MapperService mapperService = createMapperService(mapping(b -> {})); + CompressedXContent mapping1; + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) { + mapping1 = new CompressedXContent( + BytesReference.bytes( + xContentBuilder.startObject() + .startObject("_doc") + .startObject("properties") + .startObject("parent") + .field("subobjects", false) + .field("type", "object") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + } + CompressedXContent mapping2; + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) { + mapping2 = new CompressedXContent( + BytesReference.bytes( + xContentBuilder.startObject() + .startObject("_doc") + .startObject("properties") + .startObject("parent") + .startObject("properties") + .startObject("child.grandchild") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + } + + DocumentMapper subobjectsFirst = mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + DocumentMapper subobjectsLast = mapperService.merge("_doc", List.of(mapping2, mapping1), MergeReason.INDEX_TEMPLATE); + assertEquals(subobjectsFirst.mappingSource(), subobjectsLast.mappingSource()); + } + + public void testMergeMultipleRoots() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties" : { + "field" : { + "subobjects" : false, + "type" : "object" 
+ } + } + } + """); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "_doc" : { + "_meta" : { + "meta-field" : "some-info" + }, + "properties" : { + "field" : { + "properties" : { + "subfield" : { + "type" : "keyword" + } + } + } + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "_meta" : { + "meta-field" : "some-info" + }, + "properties" : { + "field" : { + "subobjects" : false, + "properties" : { + "subfield" : { + "type" : "keyword" + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testMergeMultipleRootsWithRootType() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties" : { + "field" : { + "type" : "keyword" + } + } + } + """); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "_doc" : { + "_meta" : { + "meta-field" : "some-info" + } + }, + "properties" : { + "field" : { + "subobjects" : false + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + MapperParsingException e = expectThrows( + MapperParsingException.class, + () -> mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE) + ); + assertThat(e.getMessage(), containsString("cannot merge a map with multiple roots, one of which is [_doc]")); + } + + public void testMergeMultipleRootsWithoutRootType() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties" : { + "field" : { + "type" : "keyword" + } + } + } + """); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "_meta" : { + "meta-field" : "some-info" + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), 
MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "_meta" : { + "meta-field" : "some-info" + }, + "properties" : { + "field" : { + "type" : "keyword" + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testValidMappingSubstitution() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "field": { + "type": "keyword", + "ignore_above": 1024 + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "field": { + "type": "long", + "coerce": true + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "field" : { + "type" : "long", + "coerce" : true + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testValidMappingSubtreeSubstitution() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "field": { + "type": "object", + "subobjects": false, + "properties": { + "subfield": { + "type": "keyword" + } + } + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "field": { + "type": "long", + "coerce": true + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "field" : { + "type" : "long", + "coerce" : true + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testSameTypeMerge() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "field": { + "type": "keyword", + 
"ignore_above": 256, + "doc_values": false, + "fields": { + "text": { + "type": "text" + } + } + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "field": { + "type": "keyword", + "ignore_above": 1024, + "fields": { + "other_text": { + "type": "text" + } + } + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "field" : { + "type" : "keyword", + "ignore_above" : 1024, + "fields" : { + "other_text" : { + "type" : "text" + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testObjectAndNestedTypeSubstitution() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties" : { + "field": { + "type": "nested", + "include_in_parent": true, + "properties": { + "subfield1": { + "type": "keyword" + } + } + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "field": { + "type": "object", + "properties": { + "subfield2": { + "type": "keyword" + } + } + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "field" : { + "properties" : { + "subfield1" : { + "type" : "keyword" + }, + "subfield2" : { + "type" : "keyword" + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testNestedContradictingProperties() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "field": { + "type": "nested", + "include_in_parent": false, + "properties": { + "subfield1": { + "type": "keyword" + } + } + } + } + 
}"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "field": { + "type": "nested", + "include_in_parent": true, + "properties": { + "subfield2": { + "type": "keyword" + } + } + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "field" : { + "type" : "nested", + "include_in_parent" : true, + "properties" : { + "subfield1" : { + "type" : "keyword" + }, + "subfield2" : { + "type" : "keyword" + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testImplicitObjectHierarchy() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "parent": { + "properties": { + "child.grandchild": { + "type": "keyword" + } + } + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + DocumentMapper bulkMerge = mapperService.merge("_doc", List.of(mapping1), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "properties" : { + "grandchild" : { + "type" : "keyword" + } + } + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + + DocumentMapper sequentialMerge = mapperService.merge("_doc", mapping1, MergeReason.INDEX_TEMPLATE); + assertEquals(bulkMerge.mappingSource(), sequentialMerge.mappingSource()); + } + + public void testSubobjectsMerge() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "parent": { + "type": "object", + "subobjects": false + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "parent": { + "properties": { + "child.grandchild": { + "type": "keyword" + } + } + 
} + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "parent" : { + "subobjects" : false, + "properties" : { + "child.grandchild" : { + "type" : "keyword" + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testContradictingSubobjects() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "parent": { + "type": "object", + "subobjects": false, + "properties": { + "child.grandchild": { + "type": "text" + } + } + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "parent": { + "type": "object", + "subobjects": true, + "properties": { + "child.grandchild": { + "type": "long" + } + } + } + } + }"""); + + MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "parent" : { + "subobjects" : true, + "properties" : { + "child" : { + "properties" : { + "grandchild" : { + "type" : "long" + } + } + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + + mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping2, mapping1), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "parent" : { + "subobjects" : false, + "properties" : { + "child.grandchild" : { + "type" : "text" + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testSubobjectsImplicitObjectsMerge() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "parent": { + 
"type": "object", + "subobjects": false + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "parent.child.grandchild": { + "type": "keyword" + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + MapperParsingException e = expectThrows( + MapperParsingException.class, + () -> mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE) + ); + assertThat(e.getMessage(), containsString("Tried to add subobject [child] to object [parent] which does not support subobjects")); + } + + public void testMultipleTypeMerges() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties" : { + "parent": { + "type": "object", + "properties": { + "child": { + "type": "object", + "properties": { + "grandchild1": { + "type": "keyword" + }, + "grandchild2": { + "type": "date" + } + } + } + } + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties" : { + "parent": { + "type": "object", + "properties": { + "child": { + "type": "nested", + "properties": { + "grandchild1": { + "type": "text" + }, + "grandchild3": { + "type": "text" + } + } + } + } + } + } + }"""); + + final MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + + assertEquals(""" + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "nested", + "properties" : { + "grandchild1" : { + "type" : "text" + }, + "grandchild2" : { + "type" : "date" + }, + "grandchild3" : { + "type" : "text" + } + } + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + } + + public void testPropertiesField() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "properties": { + "type": "object", + "properties": { 
+ "child": { + "type": "object", + "dynamic": true, + "properties": { + "grandchild": { + "type": "keyword" + } + } + } + } + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "properties": { + "properties": { + "child": { + "type": "long", + "coerce": true + } + } + } + } + }"""); + + MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping1, mapping2), MergeReason.INDEX_TEMPLATE); + assertEquals(""" + { + "_doc" : { + "properties" : { + "properties" : { + "properties" : { + "child" : { + "type" : "long", + "coerce" : true + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + + Mapper propertiesMapper = mapperService.documentMapper().mapping().getRoot().getMapper("properties"); + assertThat(propertiesMapper, instanceOf(ObjectMapper.class)); + Mapper childMapper = ((ObjectMapper) propertiesMapper).getMapper("child"); + assertThat(childMapper, instanceOf(FieldMapper.class)); + assertEquals("long", childMapper.typeName()); + + // Now checking the opposite merge + mapperService = createMapperService(mapping(b -> {})); + mapperService.merge("_doc", List.of(mapping2, mapping1), MergeReason.INDEX_TEMPLATE); + assertEquals(""" + { + "_doc" : { + "properties" : { + "properties" : { + "properties" : { + "child" : { + "dynamic" : "true", + "properties" : { + "grandchild" : { + "type" : "keyword" + } + } + } + } + } + } + } + }""", Strings.toString(mapperService.documentMapper().mapping(), true, true)); + + propertiesMapper = mapperService.documentMapper().mapping().getRoot().getMapper("properties"); + assertThat(propertiesMapper, instanceOf(ObjectMapper.class)); + childMapper = ((ObjectMapper) propertiesMapper).getMapper("child"); + assertThat(childMapper, instanceOf(ObjectMapper.class)); + Mapper grandchildMapper = ((ObjectMapper) childMapper).getMapper("grandchild"); + assertThat(grandchildMapper, 
instanceOf(FieldMapper.class)); + assertEquals("keyword", grandchildMapper.typeName()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index d84805d570b6d..3c77bf20b37d2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.List; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -199,7 +200,6 @@ public void testFieldReplacementForIndexTemplates() throws IOException { .endObject() .endObject() ); - mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MergeReason.INDEX_TEMPLATE); String update = Strings.toString( XContentFactory.jsonBuilder() @@ -220,7 +220,7 @@ public void testFieldReplacementForIndexTemplates() throws IOException { ); DocumentMapper mapper = mapperService.merge( MapperService.SINGLE_MAPPING_NAME, - new CompressedXContent(update), + List.of(new CompressedXContent(mapping), new CompressedXContent(update)), MergeReason.INDEX_TEMPLATE ); @@ -269,7 +269,6 @@ public void testDisallowFieldReplacementForIndexTemplates() throws IOException { .endObject() .endObject() ); - mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MergeReason.INDEX_TEMPLATE); String firstUpdate = Strings.toString( XContentFactory.jsonBuilder() @@ -285,11 +284,16 @@ public void testDisallowFieldReplacementForIndexTemplates() throws IOException { .endObject() .endObject() ); + + // We can only check such assertion in sequential merges. 
Bulk merges allow such type substitution as it replaces entire field + // mapping subtrees + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MergeReason.INDEX_TEMPLATE); + IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(firstUpdate), MergeReason.INDEX_TEMPLATE) ); - assertThat(e.getMessage(), containsString("can't merge a non object mapping [object.field2] with an object mapping")); + assertThat(e.getMessage(), containsString("can't merge a non-nested mapping [object.field2] with a nested mapping")); String secondUpdate = Strings.toString( XContentFactory.jsonBuilder() diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java index ee461ba9a562a..5896ecacb9ca8 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java @@ -693,7 +693,7 @@ public void testAutoFollowDatastreamWithClosingFollowerIndex() throws Exception leaderClient().admin() .indices() .prepareCreate(indexInDatastream) - .setMapping(MetadataIndexTemplateService.DEFAULT_TIMESTAMP_MAPPING.toString()) + .setMapping(MetadataIndexTemplateService.DEFAULT_TIMESTAMP_MAPPING_WITHOUT_ROUTING.toString()) .get() ); leaderClient().prepareIndex(indexInDatastream) From 7064bc9e5c78250b5cf6509945d6b469bd0fdde1 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 13 Sep 2023 15:21:27 +0200 Subject: [PATCH 034/114] Generated field is `ml.tokens` (#99049) The generated field name is `ml.tokens` and not `ml-tokens`. 
--- .../search/search-your-data/semantic-search-elser.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc index 642f6ac5afea1..0f07f1f4128fe 100644 --- a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc @@ -182,7 +182,7 @@ follow the progress. To perform semantic search, use the `text_expansion` query, and provide the query text and the ELSER model ID. The example below uses the query text "How to -avoid muscle soreness after running?", the `ml-tokens` field contains the +avoid muscle soreness after running?", the `ml.tokens` field contains the generated ELSER output: [source,console] From b925b132840dd11deddf8a22fea53a44222fc773 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 13 Sep 2023 09:38:05 -0400 Subject: [PATCH 035/114] Fix -Werror lossy conversion check (#99528) Compiling with `Werror` indicates these test files implicitly do a lossy conversion. Making the final conversion explicit in the tests. 
Relates: https://github.com/elastic/elasticsearch/pull/99282 --- .../java/org/elasticsearch/cluster/ClusterStateTests.java | 4 ++-- .../org/elasticsearch/cluster/metadata/MetadataTests.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java index 243d30ccf811f..89aacb6f03932 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java @@ -1178,7 +1178,7 @@ public void testGetMinTransportVersion() throws IOException { public static int expectedChunkCount(ToXContent.Params params, ClusterState clusterState) { final var metrics = ClusterState.Metric.parseString(params.param("metric", "_all"), true); - int chunkCount = 0; + long chunkCount = 0; // header chunk chunkCount += 1; @@ -1242,6 +1242,6 @@ public static int expectedChunkCount(ToXContent.Params params, ClusterState clus } } - return chunkCount; + return Math.toIntExact(chunkCount); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 393c39e336fed..ad34289d37fed 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -2170,7 +2170,7 @@ public static int expectedChunkCount(ToXContent.Params params, Metadata metadata final var context = Metadata.XContentContext.valueOf(params.param(CONTEXT_MODE_PARAM, CONTEXT_MODE_API)); // 2 chunks at the beginning - int chunkCount = 2; + long chunkCount = 2; // 1 optional chunk for persistent settings if (context != Metadata.XContentContext.API && metadata.persistentSettings().isEmpty() == false) { chunkCount += 1; @@ -2216,7 +2216,7 @@ public static int 
expectedChunkCount(ToXContent.Params params, Metadata metadata // 1 chunk to close metadata chunkCount += 1; - return chunkCount; + return Math.toIntExact(chunkCount); } /** From e3277e769575b499b8652bcbfba96310e1c9c529 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 13 Sep 2023 11:45:45 +0100 Subject: [PATCH 036/114] Test mutes --- .../elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java | 1 + .../downsample/DataStreamLifecycleDownsampleDisruptionIT.java | 1 + 2 files changed, 2 insertions(+) diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java index 7558d76169016..d898763b78714 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java @@ -1099,6 +1099,7 @@ public void testCancelViaTasksAPI() throws Exception { assertThat(json, matchesRegex(".*task (was)?\s*cancelled.*")); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99519") public void testCancelViaAsyncSearchDelete() throws Exception { Map testClusterInfo = setupTwoClusters(); String localIndex = (String) testClusterInfo.get("local.index"); diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java index 9f2c82670dc4d..cef748dfded3c 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java +++ 
b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java @@ -55,6 +55,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return settings.build(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99520") @TestLogging(value = "org.elasticsearch.datastreams.lifecycle:TRACE", reason = "debugging") public void testDataStreamLifecycleDownsampleRollingRestart() throws Exception { try (InternalTestCluster cluster = internalCluster()) { From 4e1fb3fca59f8fd6bd74d92d1aa84195692d234a Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 13 Sep 2023 15:02:22 +0100 Subject: [PATCH 037/114] Automatically disable `ignore_malformed` on datastream `@timestamp` fields (#99346) Data-stream mappings require a @timestamp field to be present and configured as a date with a specific set of parameters. The index-wide setting of ignore_malformed can cause problems here if it is set to true, because it needs to be false for the @timestamp field. This commit detects if a set of mappings is configured for a datastream by checking for the presence of a DataStreamTimestampFieldMapper metadata field, and passes that information on during Mapper construction as part of the MapperBuilderContext. DateFieldMapper.Builder now checks to see if it is specifically for a data stream timestamp field, and if it is, sets ignore_malformed to false. 
Relates to #96051 --- docs/changelog/99346.yaml | 5 ++ .../DataStreamIndexSettingsProvider.java | 2 +- .../DataStreamGetWriteIndexTests.java | 4 +- .../DataStreamTimestampFieldMapperTests.java | 53 +++++++------ .../legacygeo/GeoJsonShapeParserTests.java | 2 +- .../legacygeo/GeoWKTShapeParserTests.java | 10 +-- .../mapper/LegacyGeoShapeFieldTypeTests.java | 2 +- .../extras/RankFeatureFieldTypeTests.java | 2 +- .../extras/ScaledFloatFieldTypeTests.java | 4 +- .../ChildrenToParentAggregatorTests.java | 2 +- .../join/mapper/JoinFieldTypeTests.java | 2 +- .../percolator/QueryBuilderStoreTests.java | 4 +- .../ICUCollationKeywordFieldTypeTests.java | 8 +- .../AnnotatedTextFieldTypeTests.java | 2 +- .../index/mapper/DateFieldMapper.java | 5 ++ .../index/mapper/DocumentMapper.java | 2 +- .../index/mapper/DocumentParser.java | 2 +- .../index/mapper/DocumentParserContext.java | 2 +- .../index/mapper/MapperBuilderContext.java | 19 +++-- .../elasticsearch/index/mapper/Mapping.java | 6 +- .../index/mapper/MappingParser.java | 9 ++- .../index/mapper/NestedObjectMapper.java | 2 +- .../index/mapper/RootObjectMapper.java | 2 +- .../index/query/QueryRewriteContext.java | 2 +- .../index/query/SearchExecutionContext.java | 2 +- .../index/MappingUpdatedActionTests.java | 2 +- .../fielddata/AbstractFieldDataTestCase.java | 2 +- .../index/fielddata/FilterFieldDataTests.java | 2 +- .../fielddata/IndexFieldDataServiceTests.java | 6 +- .../mapper/BooleanScriptFieldTypeTests.java | 2 +- .../FieldAliasMapperValidationTests.java | 10 ++- .../index/mapper/FieldTypeLookupTests.java | 14 ++-- .../index/mapper/GeoPointFieldTypeTests.java | 4 +- .../index/mapper/GeoShapeFieldTypeTests.java | 3 +- .../index/mapper/IpFieldTypeTests.java | 4 +- .../index/mapper/IpRangeFieldTypeTests.java | 2 +- .../index/mapper/KeywordFieldTypeTests.java | 11 +-- .../index/mapper/MappingLookupTests.java | 6 +- .../mapper/MultiFieldsSerializationTests.java | 2 +- .../index/mapper/NestedLookupTests.java | 2 +- 
.../index/mapper/NestedObjectMapperTests.java | 11 ++- .../index/mapper/NumberFieldTypeTests.java | 6 +- .../index/mapper/ObjectMapperMergeTests.java | 76 ++++++++++++------- .../index/mapper/ParametrizedMapperTests.java | 18 ++--- .../index/mapper/RangeFieldTypeTests.java | 14 ++-- .../FlattenedIndexFieldDataTests.java | 2 +- .../query/SearchExecutionContextTests.java | 8 +- .../bucket/nested/NestedAggregatorTests.java | 2 +- .../highlight/HighlightBuilderTests.java | 2 +- .../search/lookup/LeafDocLookupTests.java | 2 +- .../rescore/QueryRescorerBuilderTests.java | 4 +- .../search/sort/AbstractSortTestCase.java | 2 +- .../metadata/DataStreamTestHelper.java | 6 +- .../aggregations/AggregatorTestCase.java | 2 +- .../main/resources/data-streams-mappings.json | 3 +- .../src/main/resources/logs-mappings.json | 3 +- .../UnsignedLongFieldTypeTests.java | 4 +- .../VersionStringFieldTypeTests.java | 2 +- .../job/RollupIndexerIndexingTests.java | 9 ++- .../GeoShapeWithDocValuesFieldTypeTests.java | 12 +-- .../index/mapper/PointFieldTypeTests.java | 2 +- .../index/mapper/ShapeFieldTypeTests.java | 2 +- .../mapper/WildcardFieldAggregationTests.java | 2 +- .../mapper/WildcardFieldMapperTests.java | 6 +- .../mapper/WildcardFieldTypeTests.java | 9 ++- 65 files changed, 255 insertions(+), 181 deletions(-) create mode 100644 docs/changelog/99346.yaml diff --git a/docs/changelog/99346.yaml b/docs/changelog/99346.yaml new file mode 100644 index 0000000000000..fc6fe02e6bf14 --- /dev/null +++ b/docs/changelog/99346.yaml @@ -0,0 +1,5 @@ +pr: 99346 +summary: Automatically disable `ignore_malformed` on datastream `@timestamp` fields +area: Mapping +type: bug +issues: [] diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index 45d4b4df159d8..7ec2d32851ea5 100644 --- 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -183,7 +183,7 @@ private List findRoutingPaths(String indexName, Settings allSettings, Li // that only the first pathMatch passed in gets recognized as a time_series_dimension. To counteract // that, we wrap the mappingSnippet in a new HashMap for each pathMatch instance. .parse(pathMatch, new HashMap<>(mappingSnippet), parserContext) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); extractPath(routingPaths, mapper); } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java index 324b70aee080f..a9bb94658b890 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java @@ -221,7 +221,7 @@ public void setup() throws Exception { ScriptCompiler.NONE, false, IndexVersion.current() - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, false)); RootObjectMapper.Builder root = new RootObjectMapper.Builder("_doc", ObjectMapper.Defaults.SUBOBJECTS); root.add( new DateFieldMapper.Builder( @@ -235,7 +235,7 @@ public void setup() throws Exception { ); MetadataFieldMapper dtfm = DataStreamTestHelper.getDataStreamTimestampFieldMapper(); Mapping mapping = new Mapping( - root.build(MapperBuilderContext.root(false)), + root.build(MapperBuilderContext.root(false, false)), new MetadataFieldMapper[] { dtfm }, Collections.emptyMap() ); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java 
b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java index 9793a07abda39..40c0d9194f1b2 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java @@ -168,27 +168,36 @@ public void testValidateNotDisallowedAttribute() { public void testValidateDefaultIgnoreMalformed() throws Exception { Settings indexSettings = Settings.builder().put(FieldMapper.IGNORE_MALFORMED_SETTING.getKey(), true).build(); - Exception e = expectThrows( - IllegalArgumentException.class, - () -> createMapperService(IndexVersion.current(), indexSettings, () -> true, timestampMapping(true, b -> { - b.startObject("@timestamp"); - b.field("type", "date"); - b.endObject(); - })) - ); - assertThat( - e.getMessage(), - equalTo("data stream timestamp field [@timestamp] has disallowed [ignore_malformed] attribute specified") - ); - - MapperService mapperService = createMapperService(IndexVersion.current(), indexSettings, () -> true, timestampMapping(true, b -> { - b.startObject("@timestamp"); - b.field("type", "date"); - b.field("ignore_malformed", false); - b.endObject(); - })); - assertThat(mapperService, notNullValue()); - assertThat(mapperService.documentMapper().mappers().getMapper("@timestamp"), notNullValue()); - assertThat(((DateFieldMapper) mapperService.documentMapper().mappers().getMapper("@timestamp")).ignoreMalformed(), is(false)); + { + MapperService mapperService = createMapperService( + IndexVersion.current(), + indexSettings, + () -> true, + timestampMapping(true, b -> { + b.startObject("@timestamp"); + b.field("type", "date"); + b.endObject(); + }) + ); + assertThat(mapperService, notNullValue()); + assertThat(mapperService.documentMapper().mappers().getMapper("@timestamp"), notNullValue()); + assertThat(((DateFieldMapper) 
mapperService.documentMapper().mappers().getMapper("@timestamp")).ignoreMalformed(), is(false)); + } + { + MapperService mapperService = createMapperService( + IndexVersion.current(), + indexSettings, + () -> true, + timestampMapping(true, b -> { + b.startObject("@timestamp"); + b.field("type", "date"); + b.field("ignore_malformed", false); + b.endObject(); + }) + ); + assertThat(mapperService, notNullValue()); + assertThat(mapperService.documentMapper().mappers().getMapper("@timestamp"), notNullValue()); + assertThat(((DateFieldMapper) mapperService.documentMapper().mappers().getMapper("@timestamp")).ignoreMalformed(), is(false)); + } } } diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java index 6da4bfa3a83c6..98effff65d8ed 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java @@ -385,7 +385,7 @@ public void testParse3DPolygon() throws IOException, ParseException { Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ); try (XContentParser parser = createParser(polygonGeoJson)) { parser.nextToken(); diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java index 38ba8622c1580..5037e0daff13e 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java +++ 
b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java @@ -293,7 +293,7 @@ public void testParseMixedDimensionPolyWithHole() throws IOException, ParseExcep parser.nextToken(); final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", IndexVersion.current(), false, true) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); // test store z disabled ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> ShapeParser.parse(parser, mapperBuilder)); @@ -325,7 +325,7 @@ public void testParseMixedDimensionPolyWithHoleStoredZ() throws IOException { final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ); // test store z disabled @@ -349,7 +349,7 @@ public void testParsePolyWithStoredZ() throws IOException { final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ); ShapeBuilder shapeBuilder = ShapeParser.parse(parser, mapperBuilder); @@ -366,7 +366,7 @@ public void testParseOpenPolygon() throws IOException { final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); final LegacyGeoShapeFieldMapper defaultMapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).coerce( false - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, false)); ElasticsearchParseException exception = expectThrows( ElasticsearchParseException.class, () 
-> ShapeParser.parse(parser, defaultMapperBuilder) @@ -378,7 +378,7 @@ public void testParseOpenPolygon() throws IOException { IndexVersion.current(), false, true - ).coerce(true).build(MapperBuilderContext.root(false)); + ).coerce(true).build(MapperBuilderContext.root(false, false)); ShapeBuilder shapeBuilder = ShapeParser.parse(parser, coercingMapperBuilder); assertNotNull(shapeBuilder); assertEquals("polygon ((100.0 5.0, 100.0 10.0, 90.0 10.0, 90.0 5.0, 100.0 5.0))", shapeBuilder.toWKT()); diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java index 00af2d73872fb..1e2cc84fd4520 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java @@ -37,7 +37,7 @@ public void testSetStrategyName() { public void testFetchSourceValue() throws IOException { IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); MappedFieldType mapper = new LegacyGeoShapeFieldMapper.Builder("field", version, false, true).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ).fieldType(); Map jsonLineString = Map.of("type", "LineString", "coordinates", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldTypeTests.java index c9bb726f8e11d..afd0d307dddbf 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldTypeTests.java @@ -25,7 
+25,7 @@ public void testIsNotAggregatable() { public void testFetchSourceValue() throws IOException { MappedFieldType mapper = new RankFeatureFieldMapper.Builder("field").nullValue(2.0f) - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); assertEquals(List.of(3.14f), fetchSourceValue(mapper, 3.14)); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java index 830ae7b65c854..603b19623a0e7 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java @@ -218,7 +218,7 @@ public void testFieldData() throws IOException { public void testFetchSourceValue() throws IOException { MappedFieldType mapper = new ScaledFloatFieldMapper.Builder("field", false, false, null).scalingFactor(100) - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); assertEquals(List.of(3.14), fetchSourceValue(mapper, 3.1415926)); assertEquals(List.of(3.14), fetchSourceValue(mapper, "3.1415")); @@ -226,7 +226,7 @@ public void testFetchSourceValue() throws IOException { MappedFieldType nullValueMapper = new ScaledFloatFieldMapper.Builder("field", false, false, null).scalingFactor(100) .nullValue(2.71) - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); assertEquals(List.of(2.71), fetchSourceValue(nullValueMapper, "")); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index ddc36d426edc3..c82ba3f5f26b5 100644 --- 
a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -300,7 +300,7 @@ static MappedFieldType[] withJoinFields(MappedFieldType... fieldTypes) { int i = fieldTypes.length; result[i++] = new ParentJoinFieldMapper.Builder("join_field").addRelation(PARENT_TYPE, Collections.singleton(CHILD_TYPE)) - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); result[i++] = new ParentIdFieldMapper.ParentIdFieldType("join_field#" + PARENT_TYPE, false); assert i == result.length; diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/JoinFieldTypeTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/JoinFieldTypeTests.java index d6568de9d2fde..b98c848b47380 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/JoinFieldTypeTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/JoinFieldTypeTests.java @@ -19,7 +19,7 @@ public class JoinFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType fieldType = new ParentJoinFieldMapper.Builder("field").build(MapperBuilderContext.root(false)).fieldType(); + MappedFieldType fieldType = new ParentJoinFieldMapper.Builder("field").build(MapperBuilderContext.root(false, false)).fieldType(); Map parentValue = Map.of("relation", "parent"); assertEquals(List.of(parentValue), fetchSourceValue(fieldType, parentValue)); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java index 6d6353b71706a..e3439c821e81f 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java +++ 
b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java @@ -62,7 +62,9 @@ public void testStoringQueryBuilders() throws IOException { TermQueryBuilder[] queryBuilders = new TermQueryBuilder[randomIntBetween(1, 16)]; IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer()); config.setMergePolicy(NoMergePolicy.INSTANCE); - BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder(MapperBuilderContext.root(false)); + BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder( + MapperBuilderContext.root(false, false) + ); MappedFieldType.FielddataOperation fielddataOperation = MappedFieldType.FielddataOperation.SEARCH; try (IndexWriter indexWriter = new IndexWriter(directory, config)) { diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldTypeTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldTypeTests.java index 2af617fce86ae..a7e78cb0fdbde 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldTypeTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldTypeTests.java @@ -18,18 +18,20 @@ public class ICUCollationKeywordFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - ICUCollationKeywordFieldMapper mapper = new ICUCollationKeywordFieldMapper.Builder("field").build(MapperBuilderContext.root(false)); + ICUCollationKeywordFieldMapper mapper = new ICUCollationKeywordFieldMapper.Builder("field").build( + MapperBuilderContext.root(false, false) + ); assertEquals(List.of("42"), fetchSourceValue(mapper.fieldType(), 42L)); assertEquals(List.of("true"), fetchSourceValue(mapper.fieldType(), true)); ICUCollationKeywordFieldMapper ignoreAboveMapper = new 
ICUCollationKeywordFieldMapper.Builder("field").ignoreAbove(4) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); assertEquals(List.of(), fetchSourceValue(ignoreAboveMapper.fieldType(), "value")); assertEquals(List.of("42"), fetchSourceValue(ignoreAboveMapper.fieldType(), 42L)); assertEquals(List.of("true"), fetchSourceValue(ignoreAboveMapper.fieldType(), true)); ICUCollationKeywordFieldMapper nullValueMapper = new ICUCollationKeywordFieldMapper.Builder("field").nullValue("NULL") - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); assertEquals(List.of("NULL"), fetchSourceValue(nullValueMapper.fieldType(), null)); } } diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java index cadd341c8b1ad..1b9f3b9447378 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java @@ -30,7 +30,7 @@ public void testIntervals() throws IOException { public void testFetchSourceValue() throws IOException { MappedFieldType fieldType = new AnnotatedTextFieldMapper.Builder("field", IndexVersion.current(), createDefaultIndexAnalyzers()) - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); assertEquals(List.of("value"), fetchSourceValue(fieldType, "value")); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 9579d921c4176..f57b3229b8062 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -358,6 +358,11 @@ public DateFieldMapper build(MapperBuilderContext context) { ); Long nullTimestamp = parseNullValue(ft); + if (name().equals(DataStreamTimestampFieldMapper.DEFAULT_PATH) + && context.isDataStream() + && ignoreMalformed.isConfigured() == false) { + ignoreMalformed.setValue(false); + } return new DateFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo, nullTimestamp, resolution, this); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index afc5cf20cae9d..02d4aa7a756e0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -27,7 +27,7 @@ public class DocumentMapper { */ public static DocumentMapper createEmpty(MapperService mapperService) { RootObjectMapper root = new RootObjectMapper.Builder(MapperService.SINGLE_MAPPING_NAME, ObjectMapper.Defaults.SUBOBJECTS).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ); MetadataFieldMapper[] metadata = mapperService.getMetadataMappers().values().toArray(new MetadataFieldMapper[0]); Mapping mapping = new Mapping(root, metadata, null); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index e4fb3cbfc8809..d98bb8b367694 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -255,7 +255,7 @@ static Mapping createDynamicUpdate(DocumentParserContext context) { for (RuntimeField runtimeField : context.getDynamicRuntimeFields()) { rootBuilder.addRuntimeField(runtimeField); } - RootObjectMapper root = 
rootBuilder.build(MapperBuilderContext.root(context.mappingLookup().isSourceSynthetic())); + RootObjectMapper root = rootBuilder.build(MapperBuilderContext.root(context.mappingLookup().isSourceSynthetic(), false)); return context.mappingLookup().getMapping().mappingUpdate(root); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 15265ea821e5c..700f0e492af73 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -538,7 +538,7 @@ public final MapperBuilderContext createDynamicMapperBuilderContext() { if (p.endsWith(".")) { p = p.substring(0, p.length() - 1); } - return new MapperBuilderContext(p, mappingLookup().isSourceSynthetic()); + return new MapperBuilderContext(p, mappingLookup().isSourceSynthetic(), false); } public abstract XContentParser parser(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java index 55bc15528404d..7506e8b8f6671 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java @@ -15,22 +15,21 @@ */ public class MapperBuilderContext { - private static final MapperBuilderContext ROOT_SYNTHETIC = new MapperBuilderContext(null, true); - private static final MapperBuilderContext ROOT_NOT_SYNTHETIC = new MapperBuilderContext(null, false); - /** * The root context, to be used when building a tree of mappers */ - public static MapperBuilderContext root(boolean isSourceSynthetic) { - return isSourceSynthetic ? 
ROOT_SYNTHETIC : ROOT_NOT_SYNTHETIC; + public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream) { + return new MapperBuilderContext(null, isSourceSynthetic, isDataStream); } private final String path; private final boolean isSourceSynthetic; + private final boolean isDataStream; - MapperBuilderContext(String path, boolean isSourceSynthetic) { + MapperBuilderContext(String path, boolean isSourceSynthetic, boolean isDataStream) { this.path = path; this.isSourceSynthetic = isSourceSynthetic; + this.isDataStream = isDataStream; } /** @@ -39,7 +38,7 @@ public static MapperBuilderContext root(boolean isSourceSynthetic) { * @return a new MapperBuilderContext with this context as its parent */ public MapperBuilderContext createChildContext(String name) { - return new MapperBuilderContext(buildFullName(name), isSourceSynthetic); + return new MapperBuilderContext(buildFullName(name), isSourceSynthetic, isDataStream); } /** @@ -59,4 +58,10 @@ public boolean isSourceSynthetic() { return isSourceSynthetic; } + /** + * Are these mappings being built for a data stream index? + */ + public boolean isDataStream() { + return isDataStream; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index a67d761522912..fb07ddbc56d83 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -30,7 +30,7 @@ public final class Mapping implements ToXContentFragment { public static final Mapping EMPTY = new Mapping( new RootObjectMapper.Builder(MapperService.SINGLE_MAPPING_NAME, ObjectMapper.Defaults.SUBOBJECTS).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ), new MetadataFieldMapper[0], null @@ -136,7 +136,7 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { * @return the resulting merged mapping. 
*/ Mapping merge(Mapping mergeWith, MergeReason reason) { - RootObjectMapper mergedRoot = root.merge(mergeWith.root, reason, MapperBuilderContext.root(isSourceSynthetic())); + RootObjectMapper mergedRoot = root.merge(mergeWith.root, reason, MapperBuilderContext.root(isSourceSynthetic(), false)); // When merging metadata fields as part of applying an index template, new field definitions // completely overwrite existing ones instead of being merged. This behavior matches how we @@ -148,7 +148,7 @@ Mapping merge(Mapping mergeWith, MergeReason reason) { if (mergeInto == null || reason == MergeReason.INDEX_TEMPLATE) { merged = metaMergeWith; } else { - merged = (MetadataFieldMapper) mergeInto.merge(metaMergeWith, MapperBuilderContext.root(isSourceSynthetic())); + merged = (MetadataFieldMapper) mergeInto.merge(metaMergeWith, MapperBuilderContext.root(isSourceSynthetic(), false)); } mergedMetadataMappers.put(merged.getClass(), merged); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java index 4cc0a48e939b1..8b30915ca4d3c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java @@ -117,7 +117,9 @@ Mapping parse(@Nullable String type, Map mappingSource) throws M Map, MetadataFieldMapper> metadataMappers = metadataMappersSupplier.get(); Map meta = null; + boolean isSourceSynthetic = mappingParserContext.getIndexSettings().getMode().isSyntheticSourceEnabled(); + boolean isDataStream = false; Iterator> iterator = mappingSource.entrySet().iterator(); while (iterator.hasNext()) { @@ -136,6 +138,7 @@ Mapping parse(@Nullable String type, Map mappingSource) throws M MetadataFieldMapper metadataFieldMapper = typeParser.parse(fieldName, fieldNodeMap, mappingParserContext).build(); metadataMappers.put(metadataFieldMapper.getClass(), metadataFieldMapper); assert 
fieldNodeMap.isEmpty(); + if (metadataFieldMapper instanceof SourceFieldMapper sfm) { // Validation in other places should have failed first assert sfm.isSynthetic() @@ -143,6 +146,10 @@ Mapping parse(@Nullable String type, Map mappingSource) throws M : "synthetic source can't be disabled in a time series index"; isSourceSynthetic = sfm.isSynthetic(); } + + if (metadataFieldMapper instanceof DataStreamTimestampFieldMapper dsfm) { + isDataStream = dsfm.isEnabled(); + } } } @@ -171,7 +178,7 @@ Mapping parse(@Nullable String type, Map mappingSource) throws M } return new Mapping( - rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic)), + rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic, isDataStream)), metadataMappers.values().toArray(new MetadataFieldMapper[0]), meta ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index d772df91a3b40..c3ea8b1c8d65b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -108,7 +108,7 @@ private static class NestedMapperBuilderContext extends MapperBuilderContext { final boolean parentIncludedInRoot; NestedMapperBuilderContext(String path, boolean parentIncludedInRoot) { - super(path, false); + super(path, false, false); this.parentIncludedInRoot = parentIncludedInRoot; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index fa8aef1317cb1..55c6a4537f688 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -321,7 +321,7 @@ private static void validateDynamicTemplate(MappingParserContext parserContext, validate( template, dynamicType, - 
(name, mapping) -> typeParser.parse(name, mapping, parserContext).build(MapperBuilderContext.root(false)) + (name, mapping) -> typeParser.parse(name, mapping, parserContext).build(MapperBuilderContext.root(false, false)) ); } lastError = null; // ok, the template is valid for at least one type diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index fc6dc04faf658..109e1394a3066 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -192,7 +192,7 @@ MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMap return fieldMapping; } else if (mapUnmappedFieldAsString) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, getIndexAnalyzers()); - return builder.build(MapperBuilderContext.root(false)).fieldType(); + return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } else { throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name); } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index e3a54ae0974bf..6f0c589d3f25b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -358,7 +358,7 @@ public MappedFieldType buildAnonymousFieldType(String type) { throw new IllegalArgumentException("No mapper found for type [" + type + "]"); } Mapper.Builder builder = typeParser.parse("__anonymous_", Collections.emptyMap(), parserContext); - Mapper mapper = builder.build(MapperBuilderContext.root(false)); + Mapper mapper = builder.build(MapperBuilderContext.root(false, false)); if (mapper instanceof 
FieldMapper) { return ((FieldMapper) mapper).fieldType(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/action/index/MappingUpdatedActionTests.java b/server/src/test/java/org/elasticsearch/cluster/action/index/MappingUpdatedActionTests.java index 5c7a1c97e9f8b..65a5093919e02 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/index/MappingUpdatedActionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/index/MappingUpdatedActionTests.java @@ -146,7 +146,7 @@ public void testSendUpdateMappingUsingAutoPutMappingAction() { mua.setClient(client); RootObjectMapper rootObjectMapper = new RootObjectMapper.Builder("name", ObjectMapper.Defaults.SUBOBJECTS).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ); Mapping update = new Mapping(rootObjectMapper, new MetadataFieldMapper[0], Map.of()); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 5b81e264c3826..43628fe59daa3 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -85,7 +85,7 @@ protected Collection> getPlugins() { public > IFD getForField(String type, String fieldName, boolean docValues) { final MappedFieldType fieldType; - final MapperBuilderContext context = MapperBuilderContext.root(false); + final MapperBuilderContext context = MapperBuilderContext.root(false, false); if (type.equals("string")) { if (docValues) { fieldType = new KeywordFieldMapper.Builder(fieldName, IndexVersion.current()).build(context).fieldType(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java index 93172aac32bad..45ebfba265c2f 100644 --- 
a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java @@ -48,7 +48,7 @@ public void testFilterByFrequency() throws Exception { } writer.forceMerge(1, true); List contexts = refreshReader(); - final MapperBuilderContext builderContext = MapperBuilderContext.root(false); + final MapperBuilderContext builderContext = MapperBuilderContext.root(false, false); { indexService.clearCaches(false, true); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 4c0f36677d682..046facfe690c2 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -77,7 +77,7 @@ public void testGetForFieldDefaults() { indicesService.getIndicesFieldDataCache(), indicesService.getCircuitBreakerService() ); - MapperBuilderContext context = MapperBuilderContext.root(false); + MapperBuilderContext context = MapperBuilderContext.root(false, false); final MappedFieldType stringMapper = new KeywordFieldMapper.Builder("string", IndexVersion.current()).build(context).fieldType(); ifdService.clear(); IndexFieldData fd = ifdService.getForField(stringMapper, FieldDataContext.noRuntimeFields("test")); @@ -156,7 +156,7 @@ public void testClearField() throws Exception { indicesService.getCircuitBreakerService() ); - final MapperBuilderContext context = MapperBuilderContext.root(false); + final MapperBuilderContext context = MapperBuilderContext.root(false, false); final MappedFieldType mapper1 = new TextFieldMapper.Builder("field_1", createDefaultIndexAnalyzers()).fielddata(true) .build(context) .fieldType(); @@ -223,7 +223,7 @@ public void testFieldDataCacheListener() throws Exception { 
indicesService.getCircuitBreakerService() ); - final MapperBuilderContext context = MapperBuilderContext.root(false); + final MapperBuilderContext context = MapperBuilderContext.root(false, false); final MappedFieldType mapper1 = new TextFieldMapper.Builder("s", createDefaultIndexAnalyzers()).fielddata(true) .build(context) .fieldType(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index 5b6b7f97759db..8a8b8e0085415 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -320,7 +320,7 @@ protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) public void testDualingQueries() throws IOException { BooleanFieldMapper ootb = new BooleanFieldMapper.Builder("foo", ScriptCompiler.NONE, false, IndexVersion.current()).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ); try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { List values = randomList(0, 2, ESTestCase::randomBoolean); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java index 8fe8ff237059d..4a29dce00436a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java @@ -159,7 +159,7 @@ public void testFieldAliasWithDifferentNestedScopes() { private static FieldMapper createFieldMapper(String parent, String name) { return new BooleanFieldMapper.Builder(name, ScriptCompiler.NONE, false, IndexVersion.current()).build( - new MapperBuilderContext(parent, false) 
+ new MapperBuilderContext(parent, false, false) ); } @@ -168,7 +168,7 @@ private static ObjectMapper createObjectMapper(String name) { } private static NestedObjectMapper createNestedObjectMapper(String name) { - return new NestedObjectMapper.Builder(name, IndexVersion.current()).build(MapperBuilderContext.root(false)); + return new NestedObjectMapper.Builder(name, IndexVersion.current()).build(MapperBuilderContext.root(false, false)); } private static MappingLookup createMappingLookup( @@ -180,7 +180,11 @@ private static MappingLookup createMappingLookup( RootObjectMapper.Builder builder = new RootObjectMapper.Builder("_doc", ObjectMapper.Defaults.SUBOBJECTS); Map runtimeFieldTypes = runtimeFields.stream().collect(Collectors.toMap(RuntimeField::name, r -> r)); builder.addRuntimeFields(runtimeFieldTypes); - Mapping mapping = new Mapping(builder.build(MapperBuilderContext.root(false)), new MetadataFieldMapper[0], Collections.emptyMap()); + Mapping mapping = new Mapping( + builder.build(MapperBuilderContext.root(false, false)), + new MetadataFieldMapper[0], + Collections.emptyMap() + ); return MappingLookup.fromMappers(mapping, fieldMappers, objectMappers, fieldAliasMappers); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index f5d207847bf02..3f50b9fdf6621 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -119,10 +119,10 @@ public void testGetMatchingFieldNames() { public void testSourcePathWithMultiFields() { MockFieldMapper field = new MockFieldMapper.Builder("field").addMultiField(new MockFieldMapper.Builder("field.subfield1")) .addMultiField(new MockFieldMapper.Builder("field.subfield2.subfield3")) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); // Adding a 
subfield that is not multi-field - MockFieldMapper subfield = new MockFieldMapper.Builder("field.subfield4").build(MapperBuilderContext.root(false)); + MockFieldMapper subfield = new MockFieldMapper.Builder("field.subfield4").build(MapperBuilderContext.root(false, false)); FieldTypeLookup lookup = new FieldTypeLookup(List.of(field, subfield), emptyList(), emptyList()); @@ -134,19 +134,19 @@ public void testSourcePathWithMultiFields() { public void testSourcePathsWithCopyTo() { MockFieldMapper field = new MockFieldMapper.Builder("field").addMultiField(new MockFieldMapper.Builder("field.subfield1")) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); MockFieldMapper nestedField = new MockFieldMapper.Builder("field.nested").addMultiField( new MockFieldMapper.Builder("field.nested.subfield1") - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, false)); MockFieldMapper otherField = new MockFieldMapper.Builder("other_field").copyTo("field") .copyTo("field.nested") - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); MockFieldMapper otherNestedField = new MockFieldMapper.Builder("other_field.nested").copyTo("field") .copyTo("field.nested") - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); FieldTypeLookup lookup = new FieldTypeLookup( Arrays.asList(field, nestedField, otherField, otherNestedField), @@ -422,6 +422,6 @@ public void testRuntimeFieldNameOutsideContext() { } private static FlattenedFieldMapper createFlattenedMapper(String fieldName) { - return new FlattenedFieldMapper.Builder(fieldName).build(MapperBuilderContext.root(false)); + return new FlattenedFieldMapper.Builder(fieldName).build(MapperBuilderContext.root(false, false)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java index bf6094473940c..aa4dec379f085 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java @@ -30,7 +30,7 @@ public void testFetchSourceValue() throws IOException { ignoreMalformed, IndexVersion.current(), null - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(42.0, 27.1)); Map otherJsonPoint = Map.of("type", "Point", "coordinates", List.of(30.0, 50.0)); @@ -89,7 +89,7 @@ public void testFetchSourceValue() throws IOException { public void testFetchVectorTile() throws IOException { MappedFieldType mapper = new GeoPointFieldMapper.Builder("field", ScriptCompiler.NONE, false, IndexVersion.current(), null).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ).fieldType(); final int z = randomIntBetween(1, 10); int x = randomIntBetween(0, (1 << z) - 1); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java index 4b2f64136a12d..b4dce62d16f37 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java @@ -15,7 +15,8 @@ public class GeoShapeFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new GeoShapeFieldMapper.Builder("field", true, true).build(MapperBuilderContext.root(false)).fieldType(); + MappedFieldType mapper = new GeoShapeFieldMapper.Builder("field", true, true).build(MapperBuilderContext.root(false, false)) + .fieldType(); Map jsonLineString = Map.of("type", "LineString", "coordinates", 
List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.0, 15.0)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java index 576655fca05c3..a13a24191357c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java @@ -349,7 +349,7 @@ public void testRangeQuery() { public void testFetchSourceValue() throws IOException { MappedFieldType mapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, IndexVersion.current()).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ).fieldType(); assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8::2:1")); assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8:0:0:0:0:2:1")); @@ -357,7 +357,7 @@ public void testFetchSourceValue() throws IOException { MappedFieldType nullValueMapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, IndexVersion.current()).nullValue( "2001:db8:0:0:0:0:2:7" - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); assertEquals(List.of("2001:db8::2:7"), fetchSourceValue(nullValueMapper, null)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldTypeTests.java index 71ddd48a91230..b066d2eaa0cf3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldTypeTests.java @@ -15,7 +15,7 @@ public class IpRangeFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - RangeFieldMapper mapper = new 
RangeFieldMapper.Builder("field", RangeType.IP, true).build(MapperBuilderContext.root(false)); + RangeFieldMapper mapper = new RangeFieldMapper.Builder("field", RangeType.IP, true).build(MapperBuilderContext.root(false, false)); Map range = Map.of("gte", "2001:db8:0:0:0:0:2:1"); assertEquals(List.of(Map.of("gte", "2001:db8::2:1")), fetchSourceValue(mapper.fieldType(), range)); assertEquals(List.of("2001:db8::2:1/32"), fetchSourceValue(mapper.fieldType(), "2001:db8:0:0:0:0:2:1/32")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index e65f0669e78ba..99b0582331a65 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -221,8 +221,9 @@ public void testNormalizeQueries() { } public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new KeywordFieldMapper.Builder("field", IndexVersion.current()).build(MapperBuilderContext.root(false)) - .fieldType(); + MappedFieldType mapper = new KeywordFieldMapper.Builder("field", IndexVersion.current()).build( + MapperBuilderContext.root(false, false) + ).fieldType(); assertEquals(List.of("value"), fetchSourceValue(mapper, "value")); assertEquals(List.of("42"), fetchSourceValue(mapper, 42L)); assertEquals(List.of("true"), fetchSourceValue(mapper, true)); @@ -231,7 +232,7 @@ public void testFetchSourceValue() throws IOException { assertEquals("Field [field] of type [keyword] doesn't support formats.", e.getMessage()); MappedFieldType ignoreAboveMapper = new KeywordFieldMapper.Builder("field", IndexVersion.current()).ignoreAbove(4) - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); assertEquals(List.of(), fetchSourceValue(ignoreAboveMapper, "value")); assertEquals(List.of("42"), 
fetchSourceValue(ignoreAboveMapper, 42L)); @@ -242,13 +243,13 @@ public void testFetchSourceValue() throws IOException { createIndexAnalyzers(), ScriptCompiler.NONE, IndexVersion.current() - ).normalizer("lowercase").build(MapperBuilderContext.root(false)).fieldType(); + ).normalizer("lowercase").build(MapperBuilderContext.root(false, false)).fieldType(); assertEquals(List.of("value"), fetchSourceValue(normalizerMapper, "VALUE")); assertEquals(List.of("42"), fetchSourceValue(normalizerMapper, 42L)); assertEquals(List.of("value"), fetchSourceValue(normalizerMapper, "value")); MappedFieldType nullValueMapper = new KeywordFieldMapper.Builder("field", IndexVersion.current()).nullValue("NULL") - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); assertEquals(List.of("NULL"), fetchSourceValue(nullValueMapper, null)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java index 845848b6fff0c..0308dac5fa216 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java @@ -42,7 +42,11 @@ private static MappingLookup createMappingLookup( RootObjectMapper.Builder builder = new RootObjectMapper.Builder("_doc", ObjectMapper.Defaults.SUBOBJECTS); Map runtimeFieldTypes = runtimeFields.stream().collect(Collectors.toMap(RuntimeField::name, r -> r)); builder.addRuntimeFields(runtimeFieldTypes); - Mapping mapping = new Mapping(builder.build(MapperBuilderContext.root(false)), new MetadataFieldMapper[0], Collections.emptyMap()); + Mapping mapping = new Mapping( + builder.build(MapperBuilderContext.root(false, false)), + new MetadataFieldMapper[0], + Collections.emptyMap() + ); return MappingLookup.fromMappers(mapping, fieldMappers, objectMappers, emptyList()); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsSerializationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsSerializationTests.java index 37bde4c922c36..712629cb27cce 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsSerializationTests.java @@ -41,7 +41,7 @@ public void testSorting() { } Mapper.Builder root = new BooleanFieldMapper.Builder("root", ScriptCompiler.NONE, false, IndexVersion.current()); - FieldMapper.MultiFields multiFields = builder.build(root, MapperBuilderContext.root(false)); + FieldMapper.MultiFields multiFields = builder.build(root, MapperBuilderContext.root(false, false)); String serialized = Strings.toString(multiFields); int lastStart = 0; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java index 07b80ee2f41e8..80ba37d8066b2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java @@ -64,7 +64,7 @@ public void testMultiLevelParents() throws IOException { } private static NestedObjectMapper buildMapper(String name) { - return new NestedObjectMapper.Builder(name, IndexVersion.current()).build(MapperBuilderContext.root(false)); + return new NestedObjectMapper.Builder(name, IndexVersion.current()).build(MapperBuilderContext.root(false, false)); } public void testAllParentFilters() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index 658f2ad503b7f..82dcf46960008 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -1500,18 +1500,21 @@ public void testIndexTemplatesMergeIncludes() throws IOException { public void testMergeNested() { NestedObjectMapper firstMapper = new NestedObjectMapper.Builder("nested1", IndexVersion.current()).includeInParent(true) .includeInRoot(true) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); NestedObjectMapper secondMapper = new NestedObjectMapper.Builder("nested1", IndexVersion.current()).includeInParent(false) .includeInRoot(true) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); - MapperException e = expectThrows(MapperException.class, () -> firstMapper.merge(secondMapper, MapperBuilderContext.root(false))); + MapperException e = expectThrows( + MapperException.class, + () -> firstMapper.merge(secondMapper, MapperBuilderContext.root(false, false)) + ); assertThat(e.getMessage(), containsString("[include_in_parent] parameter can't be updated on a nested object mapping")); NestedObjectMapper result = (NestedObjectMapper) firstMapper.merge( secondMapper, MapperService.MergeReason.INDEX_TEMPLATE, - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ); assertFalse(result.isIncludeInParent()); assertTrue(result.isIncludeInRoot()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index 7084535e59d47..20b9661ad0b42 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -813,7 +813,7 @@ public void testFetchSourceValue() throws IOException { true, IndexVersion.current(), null - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); 
assertEquals(List.of(3), fetchSourceValue(mapper, 3.14)); assertEquals(List.of(42), fetchSourceValue(mapper, "42.9")); assertEquals(List.of(3, 42), fetchSourceValues(mapper, 3.14, "foo", "42.9")); @@ -826,7 +826,7 @@ public void testFetchSourceValue() throws IOException { true, IndexVersion.current(), null - ).nullValue(2.71f).build(MapperBuilderContext.root(false)).fieldType(); + ).nullValue(2.71f).build(MapperBuilderContext.root(false, false)).fieldType(); assertEquals(List.of(2.71f), fetchSourceValue(nullValueMapper, "")); assertEquals(List.of(2.71f), fetchSourceValue(nullValueMapper, null)); } @@ -840,7 +840,7 @@ public void testFetchHalfFloatFromSource() throws IOException { true, IndexVersion.current(), null - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); /* * Half float loses a fair bit of precision compared to float but * we still do floating point comparisons. The "funny" trailing diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index 6c47f2055401e..013f041dc2499 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -33,7 +33,7 @@ private RootObjectMapper createMapping( fooBuilder.add(new TextFieldMapper.Builder("baz", createDefaultIndexAnalyzers())); } rootBuilder.add(fooBuilder); - return rootBuilder.build(MapperBuilderContext.root(false)); + return rootBuilder.build(MapperBuilderContext.root(false, false)); } public void testMerge() { @@ -41,7 +41,7 @@ public void testMerge() { ObjectMapper mergeWith = createMapping(false, true, true, true); // WHEN merging mappings - final ObjectMapper merged = rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false)); + final ObjectMapper merged = rootObjectMapper.merge(mergeWith, 
MapperBuilderContext.root(false, false)); // THEN "baz" new field is added to merged mapping final ObjectMapper mergedFoo = (ObjectMapper) merged.getMapper("foo"); @@ -61,7 +61,10 @@ public void testMergeWhenDisablingField() { // WHEN merging mappings // THEN a MapperException is thrown with an excepted message - MapperException e = expectThrows(MapperException.class, () -> rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false))); + MapperException e = expectThrows( + MapperException.class, + () -> rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false, false)) + ); assertEquals("the [enabled] parameter can't be updated for the object mapping [foo]", e.getMessage()); } @@ -70,32 +73,48 @@ public void testMergeDisabledField() { // the field is disabled, and we are not trying to re-enable it, hence merge should work RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new ObjectMapper.Builder("disabled", Explicit.IMPLICIT_TRUE) - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, false)); - RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false)); + RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false, false)); assertFalse(((ObjectMapper) merged.getMapper("disabled")).isEnabled()); } public void testMergeEnabled() { ObjectMapper mergeWith = createMapping(true, true, true, false); - MapperException e = expectThrows(MapperException.class, () -> rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false))); + MapperException e = expectThrows( + MapperException.class, + () -> rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false, false)) + ); assertEquals("the [enabled] parameter can't be updated for the object mapping [disabled]", e.getMessage()); - ObjectMapper result = rootObjectMapper.merge(mergeWith, 
MapperService.MergeReason.INDEX_TEMPLATE, MapperBuilderContext.root(false)); + ObjectMapper result = rootObjectMapper.merge( + mergeWith, + MapperService.MergeReason.INDEX_TEMPLATE, + MapperBuilderContext.root(false, false) + ); assertTrue(result.isEnabled()); } public void testMergeEnabledForRootMapper() { String type = MapperService.SINGLE_MAPPING_NAME; - ObjectMapper firstMapper = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).build(MapperBuilderContext.root(false)); + ObjectMapper firstMapper = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).build( + MapperBuilderContext.root(false, false) + ); ObjectMapper secondMapper = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).enabled(false) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); - MapperException e = expectThrows(MapperException.class, () -> firstMapper.merge(secondMapper, MapperBuilderContext.root(false))); + MapperException e = expectThrows( + MapperException.class, + () -> firstMapper.merge(secondMapper, MapperBuilderContext.root(false, false)) + ); assertEquals("the [enabled] parameter can't be updated for the object mapping [" + type + "]", e.getMessage()); - ObjectMapper result = firstMapper.merge(secondMapper, MapperService.MergeReason.INDEX_TEMPLATE, MapperBuilderContext.root(false)); + ObjectMapper result = firstMapper.merge( + secondMapper, + MapperService.MergeReason.INDEX_TEMPLATE, + MapperBuilderContext.root(false, false) + ); assertFalse(result.isEnabled()); } @@ -103,13 +122,13 @@ public void testMergeDisabledRootMapper() { String type = MapperService.SINGLE_MAPPING_NAME; final RootObjectMapper rootObjectMapper = (RootObjectMapper) new RootObjectMapper.Builder(type, ObjectMapper.Defaults.SUBOBJECTS) .enabled(false) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); // the root is disabled, and we are not trying to re-enable it, but we do want to be able to add 
runtime fields final RootObjectMapper mergeWith = new RootObjectMapper.Builder(type, ObjectMapper.Defaults.SUBOBJECTS).addRuntimeFields( Collections.singletonMap("test", new TestRuntimeField("test", "long")) - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, false)); - RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false)); + RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge(mergeWith, MapperBuilderContext.root(false, false)); assertFalse(merged.isEnabled()); assertEquals(1, merged.runtimeFields().size()); assertEquals("test", merged.runtimeFields().iterator().next().name()); @@ -119,7 +138,7 @@ public void testMergedFieldNamesFieldWithDotsSubobjectsFalseAtRoot() { RootObjectMapper mergeInto = createRootSubobjectFalseLeafWithDots(); RootObjectMapper mergeWith = createRootSubobjectFalseLeafWithDots(); - final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false)); + final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false, false)); final KeywordFieldMapper keywordFieldMapper = (KeywordFieldMapper) merged.getMapper("host.name"); assertEquals("host.name", keywordFieldMapper.name()); @@ -129,12 +148,12 @@ public void testMergedFieldNamesFieldWithDotsSubobjectsFalseAtRoot() { public void testMergedFieldNamesFieldWithDotsSubobjectsFalse() { RootObjectMapper mergeInto = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( createObjectSubobjectsFalseLeafWithDots() - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, false)); RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( createObjectSubobjectsFalseLeafWithDots() - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, false)); - final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false)); + final 
ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false, false)); ObjectMapper foo = (ObjectMapper) merged.getMapper("foo"); ObjectMapper metrics = (ObjectMapper) foo.getMapper("metrics"); @@ -145,11 +164,11 @@ public void testMergedFieldNamesFieldWithDotsSubobjectsFalse() { public void testMergedFieldNamesMultiFields() { RootObjectMapper mergeInto = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add(createTextKeywordMultiField("text")) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add(createTextKeywordMultiField("text")) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); - final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false)); + final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false, false)); TextFieldMapper text = (TextFieldMapper) merged.getMapper("text"); assertEquals("text", text.name()); @@ -162,12 +181,12 @@ public void testMergedFieldNamesMultiFields() { public void testMergedFieldNamesMultiFieldsWithinSubobjectsFalse() { RootObjectMapper mergeInto = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( createObjectSubobjectsFalseLeafWithMultiField() - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, false)); RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( createObjectSubobjectsFalseLeafWithMultiField() - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, false)); - final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false)); + final ObjectMapper merged = mergeInto.merge(mergeWith, MapperBuilderContext.root(false, false)); ObjectMapper foo = (ObjectMapper) merged.getMapper("foo"); ObjectMapper metrics = 
(ObjectMapper) foo.getMapper("metrics"); @@ -181,15 +200,16 @@ public void testMergedFieldNamesMultiFieldsWithinSubobjectsFalse() { private static RootObjectMapper createRootSubobjectFalseLeafWithDots() { FieldMapper.Builder fieldBuilder = new KeywordFieldMapper.Builder("host.name", IndexVersion.current()); - FieldMapper fieldMapper = fieldBuilder.build(MapperBuilderContext.root(false)); + FieldMapper fieldMapper = fieldBuilder.build(MapperBuilderContext.root(false, false)); assertEquals("host.name", fieldMapper.simpleName()); assertEquals("host.name", fieldMapper.name()); - return new RootObjectMapper.Builder("_doc", Explicit.EXPLICIT_FALSE).add(fieldBuilder).build(MapperBuilderContext.root(false)); + return new RootObjectMapper.Builder("_doc", Explicit.EXPLICIT_FALSE).add(fieldBuilder) + .build(MapperBuilderContext.root(false, false)); } private static ObjectMapper.Builder createObjectSubobjectsFalseLeafWithDots() { KeywordFieldMapper.Builder fieldBuilder = new KeywordFieldMapper.Builder("host.name", IndexVersion.current()); - KeywordFieldMapper fieldMapper = fieldBuilder.build(new MapperBuilderContext("foo.metrics", false)); + KeywordFieldMapper fieldMapper = fieldBuilder.build(new MapperBuilderContext("foo.metrics", false, false)); assertEquals("host.name", fieldMapper.simpleName()); assertEquals("foo.metrics.host.name", fieldMapper.name()); return new ObjectMapper.Builder("foo", ObjectMapper.Defaults.SUBOBJECTS).add( @@ -199,7 +219,7 @@ private static ObjectMapper.Builder createObjectSubobjectsFalseLeafWithDots() { private ObjectMapper.Builder createObjectSubobjectsFalseLeafWithMultiField() { TextFieldMapper.Builder fieldBuilder = createTextKeywordMultiField("host.name"); - TextFieldMapper textKeywordMultiField = fieldBuilder.build(new MapperBuilderContext("foo.metrics", false)); + TextFieldMapper textKeywordMultiField = fieldBuilder.build(new MapperBuilderContext("foo.metrics", false, false)); assertEquals("host.name", textKeywordMultiField.simpleName()); 
assertEquals("foo.metrics.host.name", textKeywordMultiField.name()); FieldMapper fieldMapper = textKeywordMultiField.multiFields.iterator().next(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index 8ff319134b964..191822ad07cfe 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -282,7 +282,7 @@ private static TestMapper fromMapping( pc = pc.createDynamicTemplateContext(null); } return (TestMapper) new TypeParser().parse("field", XContentHelper.convertToMap(JsonXContent.jsonXContent, mapping, true), pc) - .build(MapperBuilderContext.root(false)); + .build(MapperBuilderContext.root(false, false)); } private static TestMapper fromMapping(String mapping, IndexVersion version, TransportVersion transportVersion) { @@ -344,7 +344,7 @@ public void testMerging() { {"type":"test_mapper","fixed":true,"fixed2":true,"required":"value"}"""); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> mapper.merge(badMerge, MapperBuilderContext.root(false)) + () -> mapper.merge(badMerge, MapperBuilderContext.root(false, false)) ); String expectedError = """ Mapper for [field] conflicts with existing mapper: @@ -357,7 +357,7 @@ public void testMerging() { // TODO: should we have to include 'fixed' here? Or should updates take as 'defaults' the existing values? 
TestMapper goodMerge = fromMapping(""" {"type":"test_mapper","fixed":false,"variable":"updated","required":"value"}"""); - TestMapper merged = (TestMapper) mapper.merge(goodMerge, MapperBuilderContext.root(false)); + TestMapper merged = (TestMapper) mapper.merge(goodMerge, MapperBuilderContext.root(false, false)); assertEquals("{\"field\":" + mapping + "}", Strings.toString(mapper)); // original mapping is unaffected assertEquals(""" @@ -375,7 +375,7 @@ public void testMultifields() throws IOException { String addSubField = """ {"type":"test_mapper","variable":"foo","required":"value","fields":{"sub2":{"type":"keyword"}}}"""; TestMapper toMerge = fromMapping(addSubField); - TestMapper merged = (TestMapper) mapper.merge(toMerge, MapperBuilderContext.root(false)); + TestMapper merged = (TestMapper) mapper.merge(toMerge, MapperBuilderContext.root(false, false)); assertEquals(XContentHelper.stripWhitespace(""" { "field": { @@ -398,7 +398,7 @@ public void testMultifields() throws IOException { TestMapper badToMerge = fromMapping(badSubField); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> merged.merge(badToMerge, MapperBuilderContext.root(false)) + () -> merged.merge(badToMerge, MapperBuilderContext.root(false, false)) ); assertEquals("mapper [field.sub2] cannot be changed from type [keyword] to [binary]", e.getMessage()); } @@ -414,13 +414,13 @@ public void testCopyTo() { TestMapper toMerge = fromMapping(""" {"type":"test_mapper","variable":"updated","required":"value","copy_to":["foo","bar"]}"""); - TestMapper merged = (TestMapper) mapper.merge(toMerge, MapperBuilderContext.root(false)); + TestMapper merged = (TestMapper) mapper.merge(toMerge, MapperBuilderContext.root(false, false)); assertEquals(""" {"field":{"type":"test_mapper","variable":"updated","required":"value","copy_to":["foo","bar"]}}""", Strings.toString(merged)); TestMapper removeCopyTo = fromMapping(""" 
{"type":"test_mapper","variable":"updated","required":"value"}"""); - TestMapper noCopyTo = (TestMapper) merged.merge(removeCopyTo, MapperBuilderContext.root(false)); + TestMapper noCopyTo = (TestMapper) merged.merge(removeCopyTo, MapperBuilderContext.root(false, false)); assertEquals(""" {"field":{"type":"test_mapper","variable":"updated","required":"value"}}""", Strings.toString(noCopyTo)); } @@ -486,7 +486,7 @@ public void testCustomSerialization() { TestMapper toMerge = fromMapping(conflict); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> mapper.merge(toMerge, MapperBuilderContext.root(false)) + () -> mapper.merge(toMerge, MapperBuilderContext.root(false, false)) ); assertEquals( "Mapper for [field] conflicts with existing mapper:\n" @@ -575,7 +575,7 @@ public void testAnalyzers() { TestMapper original = mapper; TestMapper toMerge = fromMapping(mapping); - e = expectThrows(IllegalArgumentException.class, () -> original.merge(toMerge, MapperBuilderContext.root(false))); + e = expectThrows(IllegalArgumentException.class, () -> original.merge(toMerge, MapperBuilderContext.root(false, false))); assertEquals( "Mapper for [field] conflicts with existing mapper:\n" + "\tCannot update parameter [analyzer] from [default] to [_standard]", e.getMessage() diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 5bd9eb39d6bf4..5fe3711b1d034 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -486,26 +486,28 @@ public void testCaseInsensitiveQuery() throws Exception { } public void testFetchSourceValue() throws IOException { - MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(MapperBuilderContext.root(false)) - .fieldType(); + MappedFieldType longMapper = 
new RangeFieldMapper.Builder("field", RangeType.LONG, true).build( + MapperBuilderContext.root(false, false) + ).fieldType(); Map longRange = Map.of("gte", 3.14, "lt", "42.9"); assertEquals(List.of(Map.of("gte", 3L, "lt", 42L)), fetchSourceValue(longMapper, longRange)); MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true).format("yyyy/MM/dd||epoch_millis") - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); Map dateRange = Map.of("lt", "1990/12/29", "gte", 597429487111L); assertEquals(List.of(Map.of("lt", "1990/12/29", "gte", "1988/12/06")), fetchSourceValue(dateMapper, dateRange)); } public void testParseSourceValueWithFormat() throws IOException { - MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(MapperBuilderContext.root(false)) - .fieldType(); + MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build( + MapperBuilderContext.root(false, false) + ).fieldType(); Map longRange = Map.of("gte", 3.14, "lt", "42.9"); assertEquals(List.of(Map.of("gte", 3L, "lt", 42L)), fetchSourceValue(longMapper, longRange)); MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true).format("strict_date_time") - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); Map dateRange = Map.of("lt", "1990-12-29T00:00:00.000Z"); assertEquals(List.of(Map.of("lt", "1990/12/29")), fetchSourceValue(dateMapper, dateRange, "yyy/MM/dd")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedIndexFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedIndexFieldDataTests.java index 7b6bd2f43fe65..7c8f85595e3fe 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedIndexFieldDataTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedIndexFieldDataTests.java @@ -46,7 +46,7 @@ public void testGlobalFieldDataCaching() throws IOException { indicesService.getCircuitBreakerService() ); - FlattenedFieldMapper fieldMapper = new FlattenedFieldMapper.Builder("flattened").build(MapperBuilderContext.root(false)); + FlattenedFieldMapper fieldMapper = new FlattenedFieldMapper.Builder("flattened").build(MapperBuilderContext.root(false, false)); MappedFieldType fieldType1 = fieldMapper.fieldType().getChildFieldType("key"); AtomicInteger onCacheCalled = new AtomicInteger(); diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 6ed596b956888..5cf329b76bb3f 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -289,7 +289,11 @@ private static MappingLookup createMappingLookup(List concreteF RootObjectMapper.Builder builder = new RootObjectMapper.Builder("_doc", ObjectMapper.Defaults.SUBOBJECTS); Map runtimeFieldTypes = runtimeFields.stream().collect(Collectors.toMap(RuntimeField::name, r -> r)); builder.addRuntimeFields(runtimeFieldTypes); - Mapping mapping = new Mapping(builder.build(MapperBuilderContext.root(false)), new MetadataFieldMapper[0], Collections.emptyMap()); + Mapping mapping = new Mapping( + builder.build(MapperBuilderContext.root(false, false)), + new MetadataFieldMapper[0], + Collections.emptyMap() + ); return MappingLookup.fromMappers(mapping, mappers, Collections.emptyList(), Collections.emptyList()); } @@ -379,7 +383,7 @@ public void testSyntheticSourceScriptLoading() throws IOException { // Build a mapping using synthetic source SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null).setSynthetic().build(); - RootObjectMapper root = new 
RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).build(MapperBuilderContext.root(true)); + RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).build(MapperBuilderContext.root(true, false)); Mapping mapping = new Mapping(root, new MetadataFieldMapper[] { sourceMapper }, Map.of()); MappingLookup lookup = MappingLookup.fromMapping(mapping); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 62463dd9a2548..fd848895e25f6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -906,6 +906,6 @@ protected List objectMappers() { ); public static NestedObjectMapper nestedObject(String path) { - return new NestedObjectMapper.Builder(path, IndexVersion.current()).build(MapperBuilderContext.root(false)); + return new NestedObjectMapper.Builder(path, IndexVersion.current()).build(MapperBuilderContext.root(false, false)); } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 13da42bb51789..efa2456e31af5 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -318,7 +318,7 @@ public void testBuildSearchContextHighlight() throws IOException { @Override public MappedFieldType getFieldType(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); - return builder.build(MapperBuilderContext.root(false)).fieldType(); + return 
builder.build(MapperBuilderContext.root(false, false)).fieldType(); } }; mockContext.setMapUnmappedFieldAsString(true); diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java index 679109d30d02d..40baaa68452f7 100644 --- a/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java +++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java @@ -86,7 +86,7 @@ public void testFlattenedField() throws IOException { ScriptDocValues docValues2 = mock(ScriptDocValues.class); IndexFieldData fieldData2 = createFieldData(docValues2, "flattened.key2"); - FlattenedFieldMapper fieldMapper = new FlattenedFieldMapper.Builder("field").build(MapperBuilderContext.root(false)); + FlattenedFieldMapper fieldMapper = new FlattenedFieldMapper.Builder("field").build(MapperBuilderContext.root(false, false)); DynamicFieldType fieldType = fieldMapper.fieldType(); MappedFieldType fieldType1 = fieldType.getChildFieldType("key1"); MappedFieldType fieldType2 = fieldType.getChildFieldType("key2"); diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index ebf6629bc3194..b7979c70d0d52 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -156,7 +156,7 @@ public void testBuildRescoreSearchContext() throws ElasticsearchParseException, @Override public MappedFieldType getFieldType(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); - return builder.build(MapperBuilderContext.root(false)).fieldType(); + return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } }; @@ -218,7 +218,7 @@ public void 
testRewritingKeepsSettings() throws IOException { @Override public MappedFieldType getFieldType(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); - return builder.build(MapperBuilderContext.root(false)).fieldType(); + return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } }; diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 3133a81895af6..e0c12a594bef0 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -194,7 +194,7 @@ protected final SearchExecutionContext createMockSearchExecutionContext(IndexSea return builder.build(new IndexFieldDataCache.None(), null); }; NestedLookup nestedLookup = NestedLookup.build( - List.of(new NestedObjectMapper.Builder("path", IndexVersion.current()).build(MapperBuilderContext.root(false))) + List.of(new NestedObjectMapper.Builder("path", IndexVersion.current()).build(MapperBuilderContext.root(false, false))) ); return new SearchExecutionContext( 0, diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index bb1f1dde13d12..0466e565ad95a 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -478,7 +478,7 @@ public static MetadataRolloverService getMetadataRolloverService( ScriptCompiler.NONE, false, IndexVersion.current() - ).build(MapperBuilderContext.root(false)); + ).build(MapperBuilderContext.root(false, true)); ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool); Environment env = 
mock(Environment.class); when(env.sharedDataFile()).thenReturn(null); @@ -500,7 +500,7 @@ public static MetadataRolloverService getMetadataRolloverService( ); MetadataFieldMapper dtfm = getDataStreamTimestampFieldMapper(); Mapping mapping = new Mapping( - root.build(MapperBuilderContext.root(false)), + root.build(MapperBuilderContext.root(false, true)), new MetadataFieldMapper[] { dtfm }, Collections.emptyMap() ); @@ -556,7 +556,7 @@ public static IndicesService mockIndicesServices(MappingLookup mappingLookup) th MapperService mapperService = mock(MapperService.class); RootObjectMapper root = new RootObjectMapper.Builder(MapperService.SINGLE_MAPPING_NAME, ObjectMapper.Defaults.SUBOBJECTS).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ); Mapping mapping = new Mapping(root, new MetadataFieldMapper[0], null); DocumentMapper documentMapper = mock(DocumentMapper.class); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 9eb9a76d1e1ed..ee7a8723a5a27 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -1064,7 +1064,7 @@ public void testSupportedFieldTypes() throws IOException { IndexSettings indexSettings = createIndexSettings(); Mapper.Builder builder = mappedType.getValue().parse(fieldName, source, new MockParserContext(indexSettings)); - FieldMapper mapper = (FieldMapper) builder.build(MapperBuilderContext.root(false)); + FieldMapper mapper = (FieldMapper) builder.build(MapperBuilderContext.root(false, false)); MappedFieldType fieldType = mapper.fieldType(); diff --git a/x-pack/plugin/core/template-resources/src/main/resources/data-streams-mappings.json 
b/x-pack/plugin/core/template-resources/src/main/resources/data-streams-mappings.json index e75a297c04c85..f87c0e79b7c45 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/data-streams-mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/data-streams-mappings.json @@ -33,8 +33,7 @@ "date_detection": false, "properties": { "@timestamp": { - "type": "date", - "ignore_malformed": false + "type": "date" }, "data_stream": { "properties": { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/logs-mappings.json index 4ef0c56ed78b8..7417d4809559d 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs-mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs-mappings.json @@ -4,8 +4,7 @@ "date_detection": false, "properties": { "@timestamp": { - "type": "date", - "ignore_malformed": false + "type": "date" }, "data_stream.type": { "type": "constant_keyword", diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java index bf37412099f57..e5f85f8b87b12 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java +++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java @@ -168,7 +168,7 @@ public void testParseUpperTermForRangeQuery() { } public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new UnsignedLongFieldMapper.Builder("field", false, null).build(MapperBuilderContext.root(false)) + MappedFieldType mapper = new UnsignedLongFieldMapper.Builder("field", false, null).build(MapperBuilderContext.root(false, false)) .fieldType(); 
assertEquals(List.of(0L), fetchSourceValue(mapper, 0L)); assertEquals(List.of(9223372036854775807L), fetchSourceValue(mapper, 9223372036854775807L)); @@ -176,7 +176,7 @@ public void testFetchSourceValue() throws IOException { assertEquals(List.of(), fetchSourceValue(mapper, "")); MappedFieldType nullValueMapper = new UnsignedLongFieldMapper.Builder("field", false, null).nullValue("18446744073709551615") - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); assertEquals(List.of(BIGINTEGER_2_64_MINUS_ONE), fetchSourceValue(nullValueMapper, "")); } diff --git a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTypeTests.java b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTypeTests.java index 67d2dbed28124..1e9a21c46c754 100644 --- a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTypeTests.java +++ b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTypeTests.java @@ -17,7 +17,7 @@ public class VersionStringFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new VersionStringFieldMapper.Builder("field").build(MapperBuilderContext.root(false)).fieldType(); + MappedFieldType mapper = new VersionStringFieldMapper.Builder("field").build(MapperBuilderContext.root(false, false)).fieldType(); assertEquals(List.of("value"), fetchSourceValue(mapper, "value")); assertEquals(List.of("42"), fetchSourceValue(mapper, 42L)); assertEquals(List.of("true"), fetchSourceValue(mapper, true)); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index d2b1a1d7e6772..a1807026861da 100644 --- 
a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -721,15 +721,16 @@ private Map createFieldTypes(RollupJobConfig job) { false, IndexVersion.current(), null - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); fieldTypes.put(ft.name(), ft); } } if (job.getGroupConfig().getTerms() != null) { for (String field : job.getGroupConfig().getTerms().getFields()) { - MappedFieldType ft = new KeywordFieldMapper.Builder(field, IndexVersion.current()).build(MapperBuilderContext.root(false)) - .fieldType(); + MappedFieldType ft = new KeywordFieldMapper.Builder(field, IndexVersion.current()).build( + MapperBuilderContext.root(false, false) + ).fieldType(); fieldTypes.put(ft.name(), ft); } } @@ -744,7 +745,7 @@ private Map createFieldTypes(RollupJobConfig job) { false, IndexVersion.current(), null - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); fieldTypes.put(ft.name(), ft); } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java index 4ae293a796a1b..a575876016b13 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java @@ -44,7 +44,7 @@ public void testFetchSourceValue() throws IOException { false, false, geoFormatterFactory - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); Map jsonLineString = 
Map.of("type", "LineString", "coordinates", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.0, 15.0)); @@ -105,7 +105,7 @@ public void testFetchStoredValue() throws IOException { false, false, geoFormatterFactory - ).setStored(true).build(MapperBuilderContext.root(randomBoolean())).fieldType(); + ).setStored(true).build(MapperBuilderContext.root(randomBoolean(), false)).fieldType(); ByteOrder byteOrder = randomBoolean() ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN; @@ -149,7 +149,7 @@ private void fetchVectorTile(Geometry geometry) throws IOException { false, false, geoFormatterFactory - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); final int z = randomIntBetween(1, 10); int x = randomIntBetween(0, (1 << z) - 1); int y = randomIntBetween(0, (1 << z) - 1); @@ -261,7 +261,7 @@ private void assertFetchSourceGeometry(Object sourceValue, String wktValue, Map< false, false, geoFormatterFactory - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); assertEquals(List.of(jsonValue), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktValue), fetchSourceValue(mapper, sourceValue, "wkt")); @@ -277,7 +277,7 @@ private void assertFetchStoredGeometry(String wktValue, Map json false, false, geoFormatterFactory - ).setStored(true).build(MapperBuilderContext.root(false)).fieldType(); + ).setStored(true).build(MapperBuilderContext.root(false, false)).fieldType(); Geometry geometry = WellKnownText.fromWKT(StandardValidator.instance(false), false, wktValue); @@ -297,7 +297,7 @@ private void assertFetchSourceMVT(Object sourceValue, String mvtEquivalentAsWKT) false, false, geoFormatterFactory - ).build(MapperBuilderContext.root(false)).fieldType(); + ).build(MapperBuilderContext.root(false, false)).fieldType(); final int extent = randomIntBetween(256, 4096); 
List mvtExpected = fetchSourceValue(mapper, mvtEquivalentAsWKT, "mvt(0/0/0@" + extent + ")"); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java index 19965a1faa7e1..ed902b0f8cfe1 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java @@ -18,7 +18,7 @@ public class PointFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new PointFieldMapper.Builder("field", false).build(MapperBuilderContext.root(false)).fieldType(); + MappedFieldType mapper = new PointFieldMapper.Builder("field", false).build(MapperBuilderContext.root(false, false)).fieldType(); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(42.0, 27.1)); String wktPoint = "POINT (42.0 27.1)"; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java index 92309e96a6605..1050c9acef11a 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java @@ -20,7 +20,7 @@ public class ShapeFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { MappedFieldType mapper = new ShapeFieldMapper.Builder("field", IndexVersion.current(), false, true).build( - MapperBuilderContext.root(false) + MapperBuilderContext.root(false, false) ).fieldType(); Map jsonLineString = Map.of("type", "LineString", "coordinates", 
List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldAggregationTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldAggregationTests.java index f292f1baa567f..6f5b75441e10a 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldAggregationTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldAggregationTests.java @@ -39,7 +39,7 @@ public class WildcardFieldAggregationTests extends AggregatorTestCase { public void setup() { WildcardFieldMapper.Builder builder = new WildcardFieldMapper.Builder(WILDCARD_FIELD_NAME, IndexVersion.current()); builder.ignoreAbove(MAX_FIELD_LENGTH); - wildcardFieldMapper = builder.build(MapperBuilderContext.root(false)); + wildcardFieldMapper = builder.build(MapperBuilderContext.root(false, false)); wildcardFieldType = wildcardFieldMapper.fieldType(); } diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index f81e15ce02ce1..79727b9279a98 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -123,16 +123,16 @@ protected boolean supportsStoredFields() { public void setUp() throws Exception { Builder builder = new WildcardFieldMapper.Builder(WILDCARD_FIELD_NAME, IndexVersion.current()); builder.ignoreAbove(MAX_FIELD_LENGTH); - wildcardFieldType = builder.build(MapperBuilderContext.root(false)); + wildcardFieldType = builder.build(MapperBuilderContext.root(false, false)); Builder builder79 = new 
WildcardFieldMapper.Builder(WILDCARD_FIELD_NAME, IndexVersion.V_7_9_0); - wildcardFieldType79 = builder79.build(MapperBuilderContext.root(false)); + wildcardFieldType79 = builder79.build(MapperBuilderContext.root(false, false)); org.elasticsearch.index.mapper.KeywordFieldMapper.Builder kwBuilder = new KeywordFieldMapper.Builder( KEYWORD_FIELD_NAME, IndexVersion.current() ); - keywordFieldType = kwBuilder.build(MapperBuilderContext.root(false)); + keywordFieldType = kwBuilder.build(MapperBuilderContext.root(false, false)); rewriteDir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(WildcardFieldMapper.WILDCARD_ANALYZER_7_10); diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldTypeTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldTypeTests.java index 58c37f22c32b5..0e0db7c4547ae 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldTypeTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldTypeTests.java @@ -18,21 +18,22 @@ public class WildcardFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new WildcardFieldMapper.Builder("field", IndexVersion.current()).build(MapperBuilderContext.root(false)) - .fieldType(); + MappedFieldType mapper = new WildcardFieldMapper.Builder("field", IndexVersion.current()).build( + MapperBuilderContext.root(false, false) + ).fieldType(); assertEquals(List.of("value"), fetchSourceValue(mapper, "value")); assertEquals(List.of("42"), fetchSourceValue(mapper, 42L)); assertEquals(List.of("true"), fetchSourceValue(mapper, true)); MappedFieldType ignoreAboveMapper = new WildcardFieldMapper.Builder("field", IndexVersion.current()).ignoreAbove(4) - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); 
assertEquals(List.of(), fetchSourceValue(ignoreAboveMapper, "value")); assertEquals(List.of("42"), fetchSourceValue(ignoreAboveMapper, 42L)); assertEquals(List.of("true"), fetchSourceValue(ignoreAboveMapper, true)); MappedFieldType nullValueMapper = new WildcardFieldMapper.Builder("field", IndexVersion.current()).nullValue("NULL") - .build(MapperBuilderContext.root(false)) + .build(MapperBuilderContext.root(false, false)) .fieldType(); assertEquals(List.of("NULL"), fetchSourceValue(nullValueMapper, null)); } From 0247cfe44249828c418b0c346548ce3f4b2247a8 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 13 Sep 2023 17:37:34 +0300 Subject: [PATCH 038/114] Use long in Centroid count (#99491) * Use long in Centroid count Centroids currently use integers to track how many samples their mean tracks. This can overflow in case the digest tracks billions of samples or more. TDigestState already serializes the count as VLong, so it can be read as VInt without compatibility issues. 
Fixes #80153 * Update docs/changelog/99491.yaml * More test fixes * Bump TransportVersion * Revert TransportVersion change --- docs/changelog/99491.yaml | 6 ++++++ .../elasticsearch/tdigest/AVLGroupTree.java | 18 +++++++++--------- .../elasticsearch/tdigest/AVLTreeDigest.java | 14 +++++++------- .../org/elasticsearch/tdigest/Centroid.java | 12 ++++++------ .../elasticsearch/tdigest/HybridDigest.java | 2 +- .../elasticsearch/tdigest/MergingDigest.java | 4 ++-- .../elasticsearch/tdigest/SortingDigest.java | 2 +- .../org/elasticsearch/tdigest/TDigest.java | 2 +- .../tdigest/MergingDigestTests.java | 2 +- .../metrics/EmptyTDigestState.java | 2 +- .../aggregations/metrics/TDigestState.java | 6 +++--- .../xpack/analytics/AnalyticsTestsUtils.java | 2 +- .../HistogramPercentileAggregationTests.java | 4 ++-- 13 files changed, 41 insertions(+), 35 deletions(-) create mode 100644 docs/changelog/99491.yaml diff --git a/docs/changelog/99491.yaml b/docs/changelog/99491.yaml new file mode 100644 index 0000000000000..dfeab5dbbad6d --- /dev/null +++ b/docs/changelog/99491.yaml @@ -0,0 +1,6 @@ +pr: 99491 +summary: Use long in Centroid count +area: Aggregations +type: bug +issues: + - 80153 diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLGroupTree.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLGroupTree.java index 584d66af500b9..12b2a29d3e034 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLGroupTree.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLGroupTree.java @@ -31,10 +31,10 @@ final class AVLGroupTree extends AbstractCollection { /* For insertions into the tree */ private double centroid; - private int count; + private long count; private double[] centroids; - private int[] counts; - private int[] aggregatedCounts; + private long[] counts; + private long[] aggregatedCounts; private final IntAVLTree tree; AVLGroupTree() { @@ -78,8 +78,8 @@ protected void fixAggregates(int node) { }; centroids = new 
double[tree.capacity()]; - counts = new int[tree.capacity()]; - aggregatedCounts = new int[tree.capacity()]; + counts = new long[tree.capacity()]; + aggregatedCounts = new long[tree.capacity()]; } /** @@ -113,14 +113,14 @@ public double mean(int node) { /** * Return the count for the provided node. */ - public int count(int node) { + public long count(int node) { return counts[node]; } /** * Add the provided centroid to the tree. */ - public void add(double centroid, int count) { + public void add(double centroid, long count) { this.centroid = centroid; this.count = count; tree.add(); @@ -135,7 +135,7 @@ public boolean add(Centroid centroid) { /** * Update values associated with a node, readjusting the tree if necessary. */ - public void update(int node, double centroid, int count) { + public void update(int node, double centroid, long count) { // have to do full scale update this.centroid = centroid; this.count = count; @@ -242,7 +242,7 @@ public void remove() { /** * Return the total count of points that have been added to the tree. 
*/ - public int sum() { + public long sum() { return aggregatedCounts[tree.root()]; } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLTreeDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLTreeDigest.java index cdd6e5ab2b16a..deb3407565f36 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLTreeDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/AVLTreeDigest.java @@ -68,7 +68,7 @@ public int centroidCount() { } @Override - public void add(double x, int w) { + public void add(double x, long w) { checkValue(x); needsCompression = true; @@ -84,7 +84,7 @@ public void add(double x, int w) { } if (start == NIL) { // empty summary - assert summary.size() == 0; + assert summary.isEmpty(); summary.add(x, w); count = w; } else { @@ -127,7 +127,7 @@ public void add(double x, int w) { // if the nearest point was not unique, then we may not be modifying the first copy // which means that ordering can change double centroid = summary.mean(closest); - int count = summary.count(closest); + long count = summary.count(closest); centroid = weightedAverage(centroid, count, x, w); count += w; summary.update(closest, centroid, count); @@ -189,7 +189,7 @@ public long size() { @Override public double cdf(double x) { AVLGroupTree values = summary; - if (values.size() == 0) { + if (values.isEmpty()) { return Double.NaN; } if (values.size() == 1) { @@ -272,7 +272,7 @@ public double quantile(double q) { } AVLGroupTree values = summary; - if (values.size() == 0) { + if (values.isEmpty()) { // no centroids means no data, no way to get a quantile return Double.NaN; } else if (values.size() == 1) { @@ -293,7 +293,7 @@ public double quantile(double q) { } int currentNode = values.first(); - int currentWeight = values.count(currentNode); + long currentWeight = values.count(currentNode); // Total mass to the left of the center of the current node. 
double weightSoFar = currentWeight / 2.0; @@ -305,7 +305,7 @@ public double quantile(double q) { for (int i = 0; i < values.size() - 1; i++) { int nextNode = values.next(currentNode); - int nextWeight = values.count(nextNode); + long nextWeight = values.count(nextNode); // this is the mass between current center and next center double dw = (currentWeight + nextWeight) / 2.0; diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java index ac2ddb869d14d..a09a0862c30af 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java @@ -30,7 +30,7 @@ public class Centroid implements Comparable { private static final AtomicInteger uniqueCount = new AtomicInteger(1); private double centroid = 0; - private int count = 0; + private long count = 0; // The ID is transient because it must be unique within a given JVM. A new // ID should be generated from uniqueCount when a Centroid is deserialized. 
@@ -45,22 +45,22 @@ public Centroid(double x) { start(x, 1, uniqueCount.getAndIncrement()); } - public Centroid(double x, int w) { + public Centroid(double x, long w) { this(); start(x, w, uniqueCount.getAndIncrement()); } - public Centroid(double x, int w, int id) { + public Centroid(double x, long w, int id) { this(); start(x, w, id); } - private void start(double x, int w, int id) { + private void start(double x, long w, int id) { this.id = id; add(x, w); } - public void add(double x, int w) { + public void add(double x, long w) { count += w; centroid += w * (x - centroid) / count; } @@ -69,7 +69,7 @@ public double mean() { return centroid; } - public int count() { + public long count() { return count; } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/HybridDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/HybridDigest.java index 999f03e91ae4f..07a12381e2a71 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/HybridDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/HybridDigest.java @@ -70,7 +70,7 @@ public class HybridDigest extends AbstractTDigest { } @Override - public void add(double x, int w) { + public void add(double x, long w) { reserve(w); if (mergingDigest != null) { mergingDigest.add(x, w); diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java index 51364d8dc281b..0be2b68d76a21 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/MergingDigest.java @@ -216,7 +216,7 @@ public MergingDigest(double compression, int bufferSize, int size) { } @Override - public void add(double x, int w) { + public void add(double x, long w) { checkValue(x); if (tempUsed >= tempWeight.length - lastUsedCell - 1) { mergeNewValues(); @@ -514,7 +514,7 @@ public boolean hasNext() { @Override public Centroid next() { - 
Centroid rc = new Centroid(mean[i], (int) weight[i]); + Centroid rc = new Centroid(mean[i], (long) weight[i]); i++; return rc; } diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/SortingDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/SortingDigest.java index 200a54494e208..92f770cbb7569 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/SortingDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/SortingDigest.java @@ -39,7 +39,7 @@ public class SortingDigest extends AbstractTDigest { private boolean isSorted = true; @Override - public void add(double x, int w) { + public void add(double x, long w) { checkValue(x); isSorted = isSorted && (values.isEmpty() || values.get(values.size() - 1) <= x); for (int i = 0; i < w; i++) { diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/TDigest.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/TDigest.java index 5dc3706193211..2eaf3192eefef 100644 --- a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/TDigest.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/TDigest.java @@ -94,7 +94,7 @@ public static TDigest createHybridDigest(double compression) { * @param x The value to add. * @param w The weight of this point. */ - public abstract void add(double x, int w); + public abstract void add(double x, long w); /** * Add a single sample to this TDigest. 
diff --git a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/MergingDigestTests.java b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/MergingDigestTests.java index 2eb989f34f8a2..16a81bad50756 100644 --- a/libs/tdigest/src/test/java/org/elasticsearch/tdigest/MergingDigestTests.java +++ b/libs/tdigest/src/test/java/org/elasticsearch/tdigest/MergingDigestTests.java @@ -118,7 +118,7 @@ public void testSingletonsAtEnds() { d.add(x); } } - int last = 0; + long last = 0; for (Centroid centroid : d.centroids()) { if (last == 0) { assertEquals(1, centroid.count()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/EmptyTDigestState.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/EmptyTDigestState.java index 159e70083065c..a367b52961467 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/EmptyTDigestState.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/EmptyTDigestState.java @@ -15,7 +15,7 @@ public EmptyTDigestState() { } @Override - public void add(double x, int w) { + public void add(double x, long w) { throw new UnsupportedOperationException("Immutable Empty TDigest"); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java index c631e30a0e64f..d80eb8a58040e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java @@ -138,7 +138,7 @@ public static TDigestState read(StreamInput in) throws IOException { state.tdigest.reserve(size); } for (int i = 0; i < n; i++) { - state.add(in.readDouble(), in.readVInt()); + state.add(in.readDouble(), in.readVLong()); } return state; } @@ -189,7 +189,7 @@ public int hashCode() { h = 31 * h + Integer.hashCode(centroidCount()); for (Centroid centroid : 
centroids()) { h = 31 * h + Double.hashCode(centroid.mean()); - h = 31 * h + centroid.count(); + h = 31 * h + (int) centroid.count(); } h = 31 * h + Double.hashCode(getMax()); h = 31 * h + Double.hashCode(getMin()); @@ -205,7 +205,7 @@ public void add(TDigestState other) { tdigest.add(other.tdigest); } - public void add(double x, int w) { + public void add(double x, long w) { tdigest.add(x, w); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/AnalyticsTestsUtils.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/AnalyticsTestsUtils.java index 48ffaea45b436..afb46709959ab 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/AnalyticsTestsUtils.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/AnalyticsTestsUtils.java @@ -29,7 +29,7 @@ public static BinaryDocValuesField histogramFieldDocValues(String fieldName, dou BytesStreamOutput streamOutput = new BytesStreamOutput(); histogram.compress(); for (Centroid centroid : histogram.centroids()) { - streamOutput.writeVInt(centroid.count()); + streamOutput.writeVLong(centroid.count()); streamOutput.writeDouble(centroid.mean()); } return new BinaryDocValuesField(fieldName, streamOutput.bytes().toBytesRef()); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java index c7526af02a772..bf289a601ae21 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java @@ -185,7 +185,7 @@ private void setupTDigestHistogram(int compression) throws Exception { 
client().bulk(bulkRequest); bulkRequest = new BulkRequest(); List values = new ArrayList<>(); - List counts = new ArrayList<>(); + List counts = new ArrayList<>(); Collection centroids = histogram.centroids(); for (Centroid centroid : centroids) { values.add(centroid.mean()); @@ -196,7 +196,7 @@ private void setupTDigestHistogram(int compression) throws Exception { .startObject("inner") .startObject("data") .field("values", values.toArray(new Double[values.size()])) - .field("counts", counts.toArray(new Integer[counts.size()])) + .field("counts", counts.toArray(new Long[counts.size()])) .endObject() .endObject() .endObject(); From 5f9bb0ccbfb203f4a306d2e1af95ad7f6d691c7f Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 13 Sep 2023 15:37:44 +0100 Subject: [PATCH 039/114] Reinstate testRunnableRunsAtMostOnceAfterCancellation (#99525) This test was failing in #34004 due to a race, and although #34296 made the failures rarer they did not actually fix the race. Then in #99201 we fixed the race but the resulting test over-synchronizes and no longer meaningfully verifies the concurrent behaviour we were originally trying to check. It also fails for other reasons. This commit reverts back to the original test showing that we might run the action at most once after cancellation without any further synchronization, but fixes the assertion to use the value of the counter observed immediately after the cancellation since we cannot be sure that no extra iterations execute before the cancellation completes. 
--- .../ScheduleWithFixedDelayTests.java | 61 ++++++------------- 1 file changed, 19 insertions(+), 42 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java b/server/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java index ee7b929072bd1..ee2c39e52fadd 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java @@ -22,17 +22,15 @@ import org.junit.Before; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.CyclicBarrier; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.oneOf; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.same; @@ -275,49 +273,28 @@ public ScheduledCancellable schedule(Runnable command, TimeValue delay, Executor assertTrue(reschedulingRunnable.isCancelled()); } - public void testRunnableDoesNotRunAfterCancellation() throws Exception { - int iterations = scaledRandomIntBetween(2, 12); - - // we don't have the cancellable until we schedule the task, which needs the barrier object to reference in the closure - // so break the circular dependency here - AtomicReference checkCancel = new AtomicReference<>(); - - AtomicInteger counter = new AtomicInteger(); - CyclicBarrier barrier = new CyclicBarrier(2, () -> checkCancel.get().run()); - Runnable countingRunnable = () -> { + public void testRunnableRunsAtMostOnceAfterCancellation() throws 
Exception { + final var intervalMillis = randomLongBetween(1, 50); + final AtomicInteger counter = new AtomicInteger(); + final CountDownLatch doneLatch = new CountDownLatch(scaledRandomIntBetween(1, 12)); + final Runnable countingRunnable = () -> { counter.incrementAndGet(); - try { - barrier.await(); - } catch (Exception e) { - throw new AssertionError(e); - } + doneLatch.countDown(); }; - TimeValue interval = TimeValue.timeValueMillis(50L); - Cancellable cancellable = threadPool.scheduleWithFixedDelay(countingRunnable, interval, threadPool.generic()); - checkCancel.set(new Runnable() { - private int remaining = iterations; - - @Override - public void run() { - if (--remaining == 0) { - cancellable.cancel(); - } - } - }); - - for (int i = 0; i < iterations; i++) { - barrier.await(); - } - expectThrows(TimeoutException.class, () -> barrier.await(2 * interval.millis(), TimeUnit.MILLISECONDS)); - - assertThat(counter.get(), equalTo(iterations)); - + final Cancellable cancellable = threadPool.scheduleWithFixedDelay( + countingRunnable, + TimeValue.timeValueMillis(intervalMillis), + threadPool.generic() + ); + safeAwait(doneLatch); + assertTrue(cancellable.cancel()); + final var iterations = counter.get(); if (rarely()) { - assertBusy(() -> { - expectThrows(TimeoutException.class, () -> barrier.await(interval.millis(), TimeUnit.MILLISECONDS)); - assertThat(counter.get(), equalTo(iterations)); - }, 5 * interval.millis(), TimeUnit.MILLISECONDS); + Thread.sleep(randomLongBetween(0, intervalMillis * 5)); + } else if (randomBoolean()) { + Thread.yield(); } + assertThat(counter.get(), oneOf(iterations, iterations + 1)); } } From 454164de8e6f54b338e1432c270d41a69ef4d08e Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 13 Sep 2023 08:06:05 -0700 Subject: [PATCH 040/114] Remove 8.9 branch for CI configuration --- branches.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/branches.json b/branches.json index 3f6bcc88144da..e40478a5b194c 100644 --- a/branches.json +++ 
b/branches.json @@ -7,9 +7,6 @@ { "branch": "8.10" }, - { - "branch": "8.9" - }, { "branch": "7.17" } From 9da10224618aaf8e43aad395d1e46df29407dd84 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Wed, 13 Sep 2023 11:09:54 -0400 Subject: [PATCH 041/114] [DOCS] Fix `welcome-to-elastic` links (#99530) **Problem:** In https://github.com/elastic/docs/pull/2752, we updated the URL prefix (`welcome-to-elastic`) and name for the "Welcome to Elastic Docs" docs. However, we still have some stray links that use the old `/welcome-to-elastic` URL prefix **Solution:** Updates several outdated links. --- docs/reference/landing-page.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/landing-page.asciidoc b/docs/reference/landing-page.asciidoc index 154e2982949ac..09501178e24f8 100644 --- a/docs/reference/landing-page.asciidoc +++ b/docs/reference/landing-page.asciidoc @@ -62,7 +62,7 @@ Elasticsearch is the search and analytics engine that powers the Elastic Stack.

- +

@@ -218,7 +218,7 @@
- +

From b1da97af170a166b75aa2e5712767d5dd67ec67b Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 13 Sep 2023 18:28:03 +0300 Subject: [PATCH 042/114] Document how to reindex a TSDS (#99476) * Document how to reindex a TSDS Time-series data streams require updating start and end times in the destination index template, to avoid errors during copying of older docs. * Update docs/changelog/99476.yaml * Spotless fix. * Refresh indexes in unittest. * Fix typo. * Delete docs/changelog/99476.yaml * Fix page link name. * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink * Update docs/reference/data-streams/tsds-reindex.asciidoc Co-authored-by: Abdon Pijpelink --------- Co-authored-by: Abdon Pijpelink --- .../data-streams/tsds-reindex.asciidoc | 292 ++++++++++++++++++ docs/reference/data-streams/tsds.asciidoc | 1 + .../datastreams/TsdbDataStreamRestIT.java | 168 ++++++++++ 3 files changed, 461 insertions(+) create mode 100644 docs/reference/data-streams/tsds-reindex.asciidoc diff --git a/docs/reference/data-streams/tsds-reindex.asciidoc b/docs/reference/data-streams/tsds-reindex.asciidoc new file mode 100644 index 0000000000000..ea4ba16df5c4a --- /dev/null +++ 
b/docs/reference/data-streams/tsds-reindex.asciidoc @@ -0,0 +1,292 @@ +[[tsds-reindex]] +=== Reindex a time series data stream (TSDS) + +++++ +Reindex a TSDS +++++ + +[discrete] +[[tsds-reindex-intro]] +==== Introduction + +With reindexing, you can copy documents from an old time-series data stream (TSDS) to a new one. Data streams support +reindexing in general, with a few <>. Still, time-series data streams +introduce additional challenges due to tight control on the accepted timestamp range for each backing index they +contain. Direct use of the reindex API would likely error out due to attempting to insert documents with timestamps that are +outside the current acceptance window. + +To avoid these limitations, use the process that is outlined below: + +. Create an index template for the destination data stream that will contain the re-indexed data. +. Update the template to +.. Set `index.time_series.start_time` and `index.time_series.end_time` index settings to +match the lowest and highest `@timestamp` values in the old data stream. +.. Set the `index.number_of_shards` index setting to the sum of all primary shards of all backing +indices of the old data stream. +.. Set `index.number_of_replicas` to zero and unset the `index.lifecycle.name` index setting. +. Run the reindex operation to completion. +. Revert the overriden index settings in the destination index template. +. Invoke the `rollover` api to create a new backing index that can receive new documents. + +NOTE: This process only applies to time-series data streams without <> configuration. Data +streams with downsampling can only be re-indexed by re-indexing their backing indexes individually and adding them to an +empty destination data stream. + +In what follows, we elaborate on each step of the process with examples. 
+ +[discrete] +[[tsds-reindex-create-template]] +==== Create a TSDS template to accept old documents + +Consider a TSDS with the following template: + +[source,console] +---- +POST /_component_template/source_template +{ + "template": { + "settings": { + "index": { + "number_of_replicas": 2, + "number_of_shards": 2, + "mode": "time_series", + "routing_path": [ "metricset" ] + } + }, + "mappings": { + "properties": { + "@timestamp": { "type": "date" }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "k8s": { + "properties": { + "tx": { "type": "long" }, + "rx": { "type": "long" } + } + } + } + } + } +} + +POST /_index_template/1 +{ + "index_patterns": [ + "k8s*" + ], + "composed_of": [ + "source_template" + ], + "data_stream": {} +} +---- +// TEST[skip: not expected to match the sample below] + +A possible output of `/k8s/_settings` looks like: + +[source,console-result] +---- + +{ + ".ds-k8s-2023.09.01-000002": { + "settings": { + "index": { + "mode": "time_series", + "routing": { + "allocation": { + "include": { + "_tier_preference": "data_hot" + } + } + }, + "hidden": "true", + "number_of_shards": "2", + "time_series": { + "end_time": "2023-09-01T14:00:00.000Z", + "start_time": "2023-09-01T10:00:00.000Z" + }, + "provided_name": ".ds-k9s-2023.09.01-000002", + "creation_date": "1694439857608", + "number_of_replicas": "2", + "routing_path": [ + "metricset" + ], + ... + } + } + }, + ".ds-k8s-2023.09.01-000001": { + "settings": { + "index": { + "mode": "time_series", + "routing": { + "allocation": { + "include": { + "_tier_preference": "data_hot" + } + } + }, + "hidden": "true", + "number_of_shards": "2", + "time_series": { + "end_time": "2023-09-01T10:00:00.000Z", + "start_time": "2023-09-01T06:00:00.000Z" + }, + "provided_name": ".ds-k9s-2023.09.01-000001", + "creation_date": "1694439837126", + "number_of_replicas": "2", + "routing_path": [ + "metricset" + ], + ... 
+ } + } + } +} +---- +// NOTCONSOLE + +To reindex this TSDS, do not to re-use its index template in the destination data stream, to avoid impacting its +functionality. Instead, clone the template of the source TSDS and apply the following modifications: + +* Set `index.time_series.start_time` and `index.time_series.end_time` index settings explicitly. Their values should be +based on the lowest and highest `@timestamp` values in the data stream to reindex. This way, the initial backing index can +load all data that is contained in the source data stream. +* Set `index.number_of_shards` index setting to the sum of all primary shards of all backing indices of the source data +stream. This helps maintain the same level of search parallelism, as each shard is processed in a separate thread (or +more). +* Unset the `index.lifecycle.name` index setting, if any. This prevents ILM from modifying the destination data stream +during reindexing. +* (Optional) Set `index.number_of_replicas` to zero. This helps speed up the reindex operation. Since the data gets +copied, there is limited risk of data loss due to lack of replicas. 
+ +Using the example above as source TSDS, the template for the destination TSDS would be: + +[source,console] +---- +POST /_component_template/destination_template +{ + "template": { + "settings": { + "index": { + "number_of_replicas": 0, + "number_of_shards": 4, + "mode": "time_series", + "routing_path": [ "metricset" ], + "time_series": { + "end_time": "2023-09-01T14:00:00.000Z", + "start_time": "2023-09-01T06:00:00.000Z" + } + } + }, + "mappings": { + "properties": { + "@timestamp": { "type": "date" }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "k8s": { + "properties": { + "tx": { "type": "long" }, + "rx": { "type": "long" } + } + } + } + } + } +} + +POST /_index_template/2 +{ + "index_patterns": [ + "k8s*" + ], + "composed_of": [ + "destination_template" + ], + "data_stream": {} +} +---- +// TEST[continued] + +[discrete] +[[tsds-reindex-op]] +==== Reindex + +Invoke the reindex api, for instance: + +[source,console] +---- +POST /_reindex +{ + "source": { + "index": "k8s" + }, + "dest": { + "index": "k9s", + "op_type": "create" + } +} +---- +// TEST[continued] + +[discrete] +[[tsds-reindex-restore]] +==== Restore the destination index template + +Once the reindexing operation completes, restore the index template for the destination TSDS as follows: + +* Remove the overrides for `index.time_series.start_time` and `index.time_series.end_time`. +* Restore the values of `index.number_of_shards`, `index.number_of_replicas` and `index.lifecycle.name` as +applicable. 
+ +Using the previous example, the destination template is modified as follows: + +[source,console] +---- +POST /_component_template/destination_template +{ + "template": { + "settings": { + "index": { + "number_of_replicas": 2, + "number_of_shards": 2, + "mode": "time_series", + "routing_path": [ "metricset" ] + } + }, + "mappings": { + "properties": { + "@timestamp": { "type": "date" }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "k8s": { + "properties": { + "tx": { "type": "long" }, + "rx": { "type": "long" } + } + } + } + } + } +} +---- +// TEST[continued] + +Next, Invoke the `rollover` api on the destination data stream without any conditions set. + +[source,console] +---- +POST /k9s/_rollover/ +---- +// TEST[continued] + +This creates a new backing index with the updated index settings. The destination data stream is now ready to accept new documents. + +Note that the initial backing index can still accept documents within the range of timestamps derived from the source data +stream. If this is not desired, mark it as <> explicitly. 
diff --git a/docs/reference/data-streams/tsds.asciidoc b/docs/reference/data-streams/tsds.asciidoc index d6e9ea08f0892..d70f314205c65 100644 --- a/docs/reference/data-streams/tsds.asciidoc +++ b/docs/reference/data-streams/tsds.asciidoc @@ -340,3 +340,4 @@ include::tsds-index-settings.asciidoc[] include::downsampling.asciidoc[] include::downsampling-ilm.asciidoc[] include::downsampling-manual.asciidoc[] +include::tsds-reindex.asciidoc[] diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java index 2a4b6f0c5a5ee..1dba78b8cc431 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java @@ -683,6 +683,174 @@ public void testLookBackTime() throws IOException { assertThat(endTimeFirstBackingIndex, notNullValue()); } + public void testReindexTsdbDataStream() throws Exception { + var deleteRequest = new Request("DELETE", "/_index_template/1"); + assertOK(client().performRequest(deleteRequest)); + deleteRequest = new Request("DELETE", "/_component_template/custom_template"); + assertOK(client().performRequest(deleteRequest)); + + final int SECONDS_PER_DAY = 24 * 60 * 60; + final String CUSTOM_TEMPLATE_WITH_START_END_TIME = """ + { + "template": { + "settings":{ + "index": { + "number_of_replicas": 0, + "number_of_shards": 4, + "mode": "time_series", + "routing_path": ["metricset", "k8s.pod.uid"], + "time_series": { + "start_time": "$start", + "end_time": "$end" + } + } + }, + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "k8s": { + "properties": { + "pod": { + "properties": { + "uid": { + "type": "keyword", + "time_series_dimension": true + }, + "name": { + 
"type": "keyword" + }, + "ip": { + "type": "ip" + }, + "network": { + "properties": { + "tx": { + "type": "long" + }, + "rx": { + "type": "long" + } + } + } + } + } + } + } + } + } + } + } + """; + + // Create a data stream that's one week old. + var request = new Request("POST", "/_component_template/source_template"); + request.setJsonEntity( + CUSTOM_TEMPLATE_WITH_START_END_TIME.replace("$start", formatInstantNanos(Instant.now().minusSeconds(8 * SECONDS_PER_DAY))) + .replace("$end", formatInstantNanos(Instant.now().minusSeconds(6 * SECONDS_PER_DAY))) + ); + assertOK(client().performRequest(request)); + + request = new Request("POST", "/_index_template/1"); + request.setJsonEntity(""" + { + "index_patterns": ["k8s*"], + "composed_of": ["source_template"], + "data_stream": { + } + }"""); + assertOK(client().performRequest(request)); + + // Add some docs to it. + var bulkRequest = new Request("POST", "/k8s/_bulk"); + bulkRequest.setJsonEntity(BULK.replace("$now", formatInstantNanos(Instant.now().minusSeconds(7 * SECONDS_PER_DAY)))); + bulkRequest.addParameter("refresh", "true"); + var response = client().performRequest(bulkRequest); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + + // Clone the old data stream. + request = new Request("POST", "/_component_template/destination_template"); + request.setJsonEntity( + CUSTOM_TEMPLATE_WITH_START_END_TIME.replace("$start", formatInstantNanos(Instant.now().minusSeconds(8 * SECONDS_PER_DAY))) + .replace("$end", formatInstantNanos(Instant.now().minusSeconds(6 * SECONDS_PER_DAY))) + ); + assertOK(client().performRequest(request)); + + request = new Request("POST", "/_index_template/2"); + request.setJsonEntity(""" + { + "index_patterns": ["k9s*"], + "composed_of": ["destination_template"], + "data_stream": { + } + }"""); + assertOK(client().performRequest(request)); + + // Reindex. 
+ request = new Request("POST", "/_reindex"); + request.setJsonEntity(""" + { + "source": { + "index": "k8s" + }, + "dest": { + "index": "k9s", + "op_type": "create" + } + } + """); + assertOK(client().performRequest(request)); + + var getDataStreamsRequest = new Request("GET", "/_data_stream"); + response = client().performRequest(getDataStreamsRequest); + assertOK(response); + var dataStreams = entityAsMap(response); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams"), hasSize(2)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo("k8s")); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.indices"), hasSize(1)); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.1.name"), equalTo("k9s")); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.1.indices"), hasSize(1)); + + // Update the start and end time of the new data stream. + request = new Request("POST", "/_component_template/destination_template"); + request.setJsonEntity( + CUSTOM_TEMPLATE_WITH_START_END_TIME.replace("$start", formatInstantNanos(Instant.now().minusSeconds(SECONDS_PER_DAY))) + .replace("$end", formatInstantNanos(Instant.now().plusSeconds(SECONDS_PER_DAY))) + ); + assertOK(client().performRequest(request)); + + // Rollover to create a new index with the new settings. + request = new Request("POST", "/k9s/_rollover"); + client().performRequest(request); + + // Insert a doc with a current timestamp. 
+ request = new Request("POST", "/k9s/_doc"); + request.setJsonEntity(DOC.replace("$time", formatInstantNanos(Instant.now()))); + assertOK(client().performRequest(request)); + + request = new Request("POST", "_refresh"); + assertOK(client().performRequest(request)); + + var searchRequest = new Request("GET", "k9s/_search"); + response = client().performRequest(searchRequest); + assertOK(response); + responseBody = entityAsMap(response); + try { + assertThat(ObjectPath.evaluate(responseBody, "hits.total.value"), equalTo(9)); + assertThat(ObjectPath.evaluate(responseBody, "hits.total.relation"), equalTo("eq")); + } catch (Exception | AssertionError e) { + logger.error("search response body causing assertion error [" + responseBody + "]", e); + throw e; + } + } + private static Map getIndex(String indexName) throws IOException { var getIndexRequest = new Request("GET", "/" + indexName + "?human"); var response = client().performRequest(getIndexRequest); From a97915f0b0604272bf09f3d1fd32eec0b86d1b3f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 13 Sep 2023 17:45:26 +0200 Subject: [PATCH 043/114] Prepare aggregation yaml tests to be included in serverless build. (#99413) * Remove setting the number of replicas in aggregation yaml tests. * Stop waiting for yellow/green. The create index request by default return s when at least the primary shard has been assigned. * Adjust assertions that are sensitive to the number of shard copies. * Muted 4 aggregation yaml tests in mixed cluster qa ([link](https://github.com/elastic/elasticsearch/pull/99413/files#diff-033fa895a9866248015cbe7871deab0b3c88fa66a5e1bc9e789079d0d1a39f24R31)). These tests assert cache hits and with replicas we currently can't reliable assert this in a multi node test cluster. Like the mentioned comment suggest if the node selector can reliable select the same node, then we can unmute. I think muting is ok for now. 
These yaml tests are run in aggregation module and will soon be run in serverless. I think enforcing that many yaml tests run tests against indices with no replicas is worse for test coverage than muting 4 yaml tests in this qa module. Additionally I think that testing that the request cache work should not be tested in full blown integration tests, but more targeted unit/semi integration tests. --- .../test/aggregations/adjacency_matrix.yml | 8 +++--- .../test/aggregations/auto_date_histogram.yml | 5 ---- .../test/aggregations/avg_metric.yml | 4 --- .../test/aggregations/cardinality_metric.yml | 4 --- .../test/aggregations/composite.yml | 6 ----- .../test/aggregations/composite_sorted.yml | 1 - .../test/aggregations/date_histogram.yml | 10 -------- .../test/aggregations/date_range.yml | 25 ++++++------------- .../test/aggregations/doc_count_field.yml | 2 -- .../aggregations/extended_stats_metric.yml | 2 -- .../test/aggregations/filter.yml | 13 ++++++---- .../test/aggregations/filters_bucket.yml | 11 +++----- .../test/aggregations/geo_bounds_centroid.yml | 2 -- .../test/aggregations/histogram.yml | 7 ------ .../test/aggregations/ip_prefix.yml | 2 -- .../test/aggregations/ip_range.yml | 6 ----- .../test/aggregations/max_metric.yml | 8 +----- .../median_absolute_deviation_metric.yml | 2 -- .../test/aggregations/min_metric.yml | 4 --- .../test/aggregations/missing.yml | 2 -- .../percentile_ranks_hdr_metric.yml | 2 -- .../percentile_ranks_tdigest_metric.yml | 2 -- .../aggregations/percentiles_hdr_metric.yml | 1 - .../percentiles_tdigest_metric.yml | 2 -- .../test/aggregations/pipeline.yml | 3 --- .../rest-api-spec/test/aggregations/range.yml | 10 -------- .../test/aggregations/range_timezone_bug.yml | 6 ----- .../test/aggregations/rare_terms.yml | 9 ------- .../test/aggregations/scripted_metric.yml | 1 - .../test/aggregations/significant_texts.yml | 2 -- .../test/aggregations/stats_metric.yml | 2 -- .../test/aggregations/sum_metric.yml | 4 --- 
.../rest-api-spec/test/aggregations/terms.yml | 11 -------- .../test/aggregations/terms_disable_opt.yml | 1 - .../aggregations/terms_flattened_field.yml | 5 ---- .../test/aggregations/time_series.yml | 5 ---- .../aggregations/top_hits_nested_metric.yml | 2 -- .../test/aggregations/typed_keys.yml | 2 -- .../test/aggregations/value_count_metric.yml | 2 -- .../aggregations/variable_width_histogram.yml | 2 -- .../test/aggregations/weighted_avg.yml | 1 - qa/mixed-cluster/build.gradle | 17 +++++++++++++ 42 files changed, 42 insertions(+), 174 deletions(-) diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/adjacency_matrix.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/adjacency_matrix.yml index bccafc23b0d5f..e02b5fdc8b3cf 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/adjacency_matrix.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/adjacency_matrix.yml @@ -5,7 +5,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: num: @@ -75,7 +74,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: num: @@ -97,6 +95,7 @@ setup: version: current # the version of the node that parsed the request is part of the cache key. search: index: test + preference: hit-same-shard-copy body: size: 0 aggs: @@ -140,7 +139,7 @@ setup: - do: indices.stats: { index: test, metric: request_cache} - - match: { _shards.total: 1 } + - gte: { _shards.total: 1 } - match: { indices.test.total.request_cache.hit_count: 0 } - match: { indices.test.total.request_cache.miss_count: 1 } @@ -150,6 +149,7 @@ setup: version: current # the version of the node that parsed the request is part of the cache key. 
search: index: test + preference: hit-same-shard-copy body: size: 0 aggs: @@ -176,6 +176,6 @@ setup: path: num - do: indices.stats: { index: test, metric: request_cache} - - match: { _shards.total: 1 } + - gte: { _shards.total: 1 } - match: { indices.test.total.request_cache.hit_count: 1 } - match: { indices.test.total.request_cache.miss_count: 1 } diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/auto_date_histogram.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/auto_date_histogram.yml index 93c934804ebd4..792a81e32f5cb 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/auto_date_histogram.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/auto_date_histogram.yml @@ -5,7 +5,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: date: @@ -105,8 +104,6 @@ setup: indices.create: index: date_field_type_date body: - settings: - number_of_replicas: 0 mappings: properties: date_field: @@ -116,8 +113,6 @@ setup: indices.create: index: date_field_type_date_nanos body: - settings: - number_of_replicas: 0 mappings: properties: date_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/avg_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/avg_metric.yml index 53ff2d939efa3..a629d90ec9f7d 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/avg_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/avg_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: @@ -47,8 +45,6 @@ setup: indices.create: index: test_2 body: - settings: - number_of_replicas: 0 mappings: properties: other_field: diff --git 
a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/cardinality_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/cardinality_metric.yml index e5e3914c45cf8..b03fec6331168 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/cardinality_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/cardinality_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: @@ -47,8 +45,6 @@ setup: indices.create: index: test_2 body: - settings: - number_of_replicas: 0 mappings: properties: other_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/composite.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/composite.yml index 44a521792d7d1..e480f9636f351 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/composite.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/composite.yml @@ -66,8 +66,6 @@ setup: indices.create: index: date_histogram_timezone_test body: - settings: - number_of_replicas: 0 mappings: properties: date: @@ -1621,8 +1619,6 @@ setup: indices.create: index: date_field_type_date body: - settings: - number_of_replicas: 0 mappings: properties: date_field: @@ -1632,8 +1628,6 @@ setup: indices.create: index: date_field_type_date_nanos body: - settings: - number_of_replicas: 0 mappings: properties: date_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/composite_sorted.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/composite_sorted.yml index f343c2686ff70..426e6631ab06d 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/composite_sorted.yml +++ 
b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/composite_sorted.yml @@ -13,7 +13,6 @@ setup: settings: index: number_of_shards: 1 - number_of_replicas: 0 sort: field: date order: desc diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml index 1e4ae5abc14d4..bb1a2336bce83 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml @@ -25,8 +25,6 @@ setup: indices.create: index: date_histogram_timezone_test body: - settings: - number_of_replicas: 0 mappings: properties: date: @@ -36,8 +34,6 @@ setup: indices.create: index: test_timezone_date_nanos body: - settings: - number_of_replicas: 0 mappings: properties: date: @@ -47,8 +43,6 @@ setup: indices.create: index: timezone_daylight_test body: - settings: - number_of_replicas: 0 mappings: properties: date: @@ -499,8 +493,6 @@ setup: indices.create: index: date_field_type_date body: - settings: - number_of_replicas: 0 mappings: properties: date_field: @@ -510,8 +502,6 @@ setup: indices.create: index: date_field_type_date_nanos body: - settings: - number_of_replicas: 0 mappings: properties: date_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_range.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_range.yml index e1160520ac3e7..af78ab3a3677e 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_range.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_range.yml @@ -3,8 +3,6 @@ setup: indices.create: index: old_test body: - settings: - number_of_replicas: 0 mappings: properties: date: @@ -15,24 
+13,18 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: date: type: date format: epoch_second - - do: - cluster.health: - wait_for_status: yellow - # March 8th 2020 was the start of daylight savings time that year - do: bulk: index: test refresh: true - body: + body: - {"index": {}} - { "date" : "2020-03-08T06:15:00Z" } - {"index": {}} @@ -44,7 +36,7 @@ setup: bulk: index: old_test refresh: true - body: + body: - {"index": {}} - { "date" : 1000 } - {"index": {}} @@ -56,15 +48,15 @@ setup: search: body: size: 0 - aggs: - date_range: - date_range: + aggs: + date_range: + date_range: field: "date" - ranges: - - + ranges: + - from: 1000 to: 3000 - - + - from: 3000 to: 4000 @@ -141,7 +133,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 - do: search: index: test_a_unmapped diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/doc_count_field.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/doc_count_field.yml index 911f7dc58caad..574eba61dafbe 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/doc_count_field.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/doc_count_field.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: str: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/extended_stats_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/extended_stats_metric.yml index b5a6046b4d7a1..6d0e167d10f33 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/extended_stats_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/extended_stats_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - 
settings: - number_of_replicas: 0 mappings: properties: int_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filter.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filter.yml index 3855701cfadb1..e68aa621f1544 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filter.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filter.yml @@ -5,7 +5,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: mentions: @@ -35,6 +34,7 @@ setup: rest_total_hits_as_int: true size: 0 request_cache: true + preference: hit-same-shard-copy body: aggs: itemsNotify: @@ -58,7 +58,7 @@ setup: # The first request will miss the cache - do: indices.stats: { index: test, metric: request_cache} - - match: { _shards.total: 1 } + - gte: { _shards.total: 1 } - match: { indices.test.total.request_cache.hit_count: 0 } - match: { indices.test.total.request_cache.miss_count: 1 } @@ -69,6 +69,7 @@ setup: rest_total_hits_as_int: true size: 0 request_cache: true + preference: hit-same-shard-copy body: aggs: itemsNotify: @@ -92,7 +93,7 @@ setup: # The second result with hit the cache - do: indices.stats: { index: test, metric: request_cache} - - match: { _shards.total: 1 } + - gte: { _shards.total: 1 } - match: { indices.test.total.request_cache.hit_count: 1 } - match: { indices.test.total.request_cache.miss_count: 1 } @@ -108,6 +109,7 @@ setup: rest_total_hits_as_int: true size: 0 request_cache: true + preference: hit-same-shard-copy body: aggs: itemsNotify: @@ -128,7 +130,7 @@ setup: # The first request will miss the cache - do: indices.stats: { index: test, metric: request_cache} - - match: { _shards.total: 1 } + - gte: { _shards.total: 1 } - match: { indices.test.total.request_cache.hit_count: 0 } - match: { indices.test.total.request_cache.miss_count: 1 } - is_true: indices.test @@ -141,6 +143,7 @@ 
setup: rest_total_hits_as_int: true size: 0 request_cache: true + preference: hit-same-shard-copy body: aggs: itemsNotify: @@ -161,7 +164,7 @@ setup: # The first request will miss the cache - do: indices.stats: { index: test, metric: request_cache} - - match: { _shards.total: 1 } + - gte: { _shards.total: 1 } - match: { indices.test.total.request_cache.hit_count: 1 } - match: { indices.test.total.request_cache.miss_count: 1 } diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filters_bucket.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filters_bucket.yml index 09f40da77fddb..0053d22f05a80 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filters_bucket.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filters_bucket.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: @@ -475,8 +473,6 @@ nested: indices.create: index: test_nested body: - settings: - number_of_replicas: 0 mappings: properties: i: @@ -525,7 +521,6 @@ nested: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: mentions: @@ -550,6 +545,7 @@ nested: rest_total_hits_as_int: true size: 0 request_cache: true + preference: hit-same-shard-copy index: test body: aggs: @@ -578,7 +574,7 @@ nested: # The first request will miss the cache - do: indices.stats: { index: test, metric: request_cache} - - match: { _shards.total: 1 } + - gte: { _shards.total: 1 } - match: { indices.test.total.request_cache.hit_count: 0 } - match: { indices.test.total.request_cache.miss_count: 1 } @@ -589,6 +585,7 @@ nested: rest_total_hits_as_int: true size: 0 request_cache: true + preference: hit-same-shard-copy index: test body: aggs: @@ -617,6 +614,6 @@ nested: # The second result with hit the cache - do: indices.stats: { index: test, metric: request_cache} - - 
match: { _shards.total: 1 } + - gte: { _shards.total: 1 } - match: { indices.test.total.request_cache.hit_count: 1 } - match: { indices.test.total.request_cache.miss_count: 1 } diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/geo_bounds_centroid.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/geo_bounds_centroid.yml index bfd1763aaba8f..5a4a78a73cdc6 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/geo_bounds_centroid.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/geo_bounds_centroid.yml @@ -6,8 +6,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: location: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/histogram.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/histogram.yml index 4a5c390dc3f62..e84b296a04ecf 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/histogram.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/histogram.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: number: @@ -14,9 +12,6 @@ setup: fields: nanos: type: date_nanos - - do: - cluster.health: - wait_for_status: green --- "Basic test": @@ -722,7 +717,6 @@ setup: index: test_2 body: settings: - number_of_replicas: 0 number_of_shards: 1 mappings: properties: @@ -780,7 +774,6 @@ setup: index: test_2 body: settings: - number_of_replicas: 0 number_of_shards: 1 mappings: properties: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ip_prefix.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ip_prefix.yml index 33c6c5d78f897..f590fa8807d7b 100644 --- 
a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ip_prefix.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ip_prefix.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: ipv4: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ip_range.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ip_range.yml index 461732a0d461d..0dde5cc5c6326 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ip_range.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ip_range.yml @@ -3,17 +3,11 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: ip: type: ip - - do: - cluster.health: - wait_for_status: yellow - --- "IP range": - do: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/max_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/max_metric.yml index 962643766817f..8c81f74a4c664 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/max_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/max_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: @@ -17,16 +15,12 @@ setup: - do: indices.create: index: date_test_1 - body: - settings: - number_of_replicas: 0 + body: {} - do: indices.create: index: date_test_2 body: - settings: - number_of_replicas: 0 mappings: properties: date_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/median_absolute_deviation_metric.yml 
b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/median_absolute_deviation_metric.yml index 2b7425e3c4dc8..65408812c6518 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/median_absolute_deviation_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/median_absolute_deviation_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: int_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/min_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/min_metric.yml index bf1c74b115efd..b08f9a6905572 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/min_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/min_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: @@ -47,8 +45,6 @@ setup: indices.create: index: test_2 body: - settings: - number_of_replicas: 0 mappings: properties: other_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/missing.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/missing.yml index bd7bb925a7adb..237378bf90a28 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/missing.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/missing.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: field1: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentile_ranks_hdr_metric.yml 
b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentile_ranks_hdr_metric.yml index 5c00bd1cf732a..a6c1f34cacec2 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentile_ranks_hdr_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentile_ranks_hdr_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: int: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentile_ranks_tdigest_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentile_ranks_tdigest_metric.yml index a6a187aa9d7fd..60ab82588006c 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentile_ranks_tdigest_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentile_ranks_tdigest_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: int: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_hdr_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_hdr_metric.yml index 21f3ad31558af..6bf37425d9af4 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_hdr_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_hdr_metric.yml @@ -4,7 +4,6 @@ setup: index: test_1 body: settings: - number_of_replicas: 0 number_of_shards: 5 number_of_routing_shards: 5 mappings: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_tdigest_metric.yml 
b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_tdigest_metric.yml index d08aaf99f3068..c703727c9146f 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_tdigest_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_tdigest_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/pipeline.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/pipeline.yml index d78b5890ee43a..3189afce08a88 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/pipeline.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/pipeline.yml @@ -7,8 +7,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: @@ -389,7 +387,6 @@ setup: index: test body: settings: - number_of_replicas: 0 number_of_shards: 3 - do: bulk: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/range.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/range.yml index 88760b99714a6..cf110dc3e7af2 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/range.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/range.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: double: @@ -20,8 +18,6 @@ setup: indices.create: index: date_range_test body: - settings: - number_of_replicas: 0 mappings: properties: date: @@ -32,17 +28,11 @@ setup: indices.create: index: long_value_test body: - settings: - number_of_replicas: 0 
mappings: properties: long: type: long - - do: - cluster.health: - wait_for_status: yellow - - do: bulk: index: test diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/range_timezone_bug.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/range_timezone_bug.yml index dba480e31985a..87641f43211db 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/range_timezone_bug.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/range_timezone_bug.yml @@ -3,18 +3,12 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: mydate: type: date format: "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSZZZZZ" - - do: - cluster.health: - wait_for_status: green - - do: index: index: test diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/rare_terms.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/rare_terms.yml index a2e74fdbd58b7..3235925d8e00e 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/rare_terms.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/rare_terms.yml @@ -6,8 +6,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: str: @@ -23,11 +21,6 @@ setup: date: type: date - - - do: - cluster.health: - wait_for_status: green - --- "Basic test": - do: @@ -370,8 +363,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: str: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/scripted_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/scripted_metric.yml index cb8c0c9cdc5a0..b39f67a56b6fd 100644 --- 
a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/scripted_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/scripted_metric.yml @@ -5,7 +5,6 @@ setup: body: settings: number_of_shards: 2 - number_of_replicas: 0 mappings: properties: transaction: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/significant_texts.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/significant_texts.yml index 1b1b9e0ed99bf..ef63c2853edef 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/significant_texts.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/significant_texts.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: full_text: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric.yml index 74440e039eb6e..54b30fbbbe077 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/sum_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/sum_metric.yml index d147b71d010c5..9c80fe8e9a43e 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/sum_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/sum_metric.yml @@ -3,8 +3,6 @@ setup: 
indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: @@ -47,8 +45,6 @@ setup: indices.create: index: test_2 body: - settings: - number_of_replicas: 0 mappings: properties: other_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms.yml index 8fff72a240bd7..faedbbc997e11 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms.yml @@ -5,7 +5,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: str: @@ -29,16 +28,11 @@ setup: indices.create: index: test_2 body: - settings: - number_of_replicas: 0 mappings: properties: number: type: double - - do: - cluster.health: - wait_for_status: green --- "Basic test": - do: @@ -869,7 +863,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: str: @@ -1382,7 +1375,6 @@ Value type mismatch fails shard: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: ip: @@ -1393,7 +1385,6 @@ Value type mismatch fails shard: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: ip: @@ -1458,7 +1449,6 @@ Value type mismatch fails shard with no docs: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: ip: @@ -1469,7 +1459,6 @@ Value type mismatch fails shard with no docs: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: ip: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms_disable_opt.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms_disable_opt.yml index d21321dd2fff4..a3e606beeadad 100644 --- 
a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms_disable_opt.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms_disable_opt.yml @@ -11,7 +11,6 @@ setup: settings: index: number_of_shards: 1 - number_of_replicas: 0 - do: bulk: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms_flattened_field.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms_flattened_field.yml index 5e41bf11a2be3..9576b3a3d9589 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms_flattened_field.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/terms_flattened_field.yml @@ -5,16 +5,11 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: flattened: type: flattened - - do: - cluster.health: - wait_for_status: green - - do: bulk: refresh: true diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml index 553ad5c2019d7..37306151a72be 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml @@ -4,7 +4,6 @@ setup: index: tsdb body: settings: - number_of_replicas: 0 mode: time_series routing_path: [key] time_series: @@ -18,10 +17,6 @@ setup: "@timestamp": type: date - - do: - cluster.health: - wait_for_status: green - - do: bulk: index: tsdb diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/top_hits_nested_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/top_hits_nested_metric.yml index 5b05382eaa292..fa675360881ea 100644 --- 
a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/top_hits_nested_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/top_hits_nested_metric.yml @@ -5,7 +5,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: users: @@ -89,7 +88,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: _source: enabled: false diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/typed_keys.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/typed_keys.yml index d041432556430..fdc997e771311 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/typed_keys.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/typed_keys.yml @@ -4,8 +4,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: name: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/value_count_metric.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/value_count_metric.yml index 2c46a2035b386..c3191a30d77b3 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/value_count_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/value_count_metric.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test_1 body: - settings: - number_of_replicas: 0 mappings: properties: int_field: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/variable_width_histogram.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/variable_width_histogram.yml index 2e2e086dc0bcb..d42bc1cbff436 100644 --- 
a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/variable_width_histogram.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/variable_width_histogram.yml @@ -3,8 +3,6 @@ setup: indices.create: index: test body: - settings: - number_of_replicas: 0 mappings: properties: number: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/weighted_avg.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/weighted_avg.yml index 2ffc7eaf74625..e988f62082565 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/weighted_avg.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/weighted_avg.yml @@ -5,7 +5,6 @@ setup: body: settings: number_of_shards: 1 - number_of_replicas: 0 mappings: properties: double_field: diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index d80bf0ea47e77..08d64e2b9353b 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -27,6 +27,20 @@ restResources { } } +def excludeList = [] +// Excluding these cache aggregation tests from mixed cluster qa, +// because we can't hit the same node reliable. The qa cluster +// consists of 4 nodes. Two nodes are on old version and the +// other two nodes on the current version. The node selector skips +// the nodes on current version. The rest client then round robins +// between the two nodes on old version. In order to unmute this, +// we need a different node selector, that always consistently +// selects the same node. 
+excludeList.add('aggregations/adjacency_matrix/Terms lookup') +excludeList.add('aggregations/filter/Standard queries get cached') +excludeList.add('aggregations/filter/Terms lookup gets cached') +excludeList.add('aggregations/filters_bucket/cache hits') + BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> if (bwcVersion != VersionProperties.getElasticsearchVersion()) { @@ -55,6 +69,9 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> baseCluster.get().nextNodeToNextVersion() nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) + if (excludeList.isEmpty() == false) { + systemProperty 'tests.rest.blacklist', excludeList.join(',') + } } systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}" onlyIf("BWC tests disabled") { project.bwc_tests_enabled } From a381fc40adfe14655d7bb40778b41db392a98b49 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 13 Sep 2023 09:34:26 -0700 Subject: [PATCH 044/114] Adjust logging level in ESQL (#99543) I noticed that when testing with the security track, this logging line produced a large paragraph. It should have the DEBUG level instead of INFO. 
--- .../org/elasticsearch/xpack/esql/plugin/ComputeService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 27634bf0d4eaa..e3d0034abde6b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -136,7 +136,7 @@ public void execute( } QueryBuilder requestFilter = PlannerUtils.requestFilter(dataNodePlan); - LOGGER.info("Sending data node plan\n{}\n with filter [{}]", dataNodePlan, requestFilter); + LOGGER.debug("Sending data node plan\n{}\n with filter [{}]", dataNodePlan, requestFilter); String[] originalIndices = PlannerUtils.planOriginalIndices(physicalPlan); computeTargetNodes( From 7b4d2faae95190f8c41295905d90f1dec2b4423a Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 13 Sep 2023 19:59:23 +0300 Subject: [PATCH 045/114] ESQL: Fully replace text fields with their exact representation (#99390) Replacing at the plan level an attribute with its exact subfield ends up breaks down when performing extraction since the name is used verbatim. To avoid more disruptive changes, use the SQL approach of switching the field late in the process, when doing query translation. 
--- .../optimizer/LocalPhysicalPlanOptimizer.java | 8 +++++- .../esql/optimizer/LogicalPlanOptimizer.java | 27 ++----------------- .../xpack/esql/planner/AggregateMapper.java | 11 ++++++-- .../xpack/esql/planner/Layout.java | 6 ++++- .../optimizer/LogicalPlanOptimizerTests.java | 4 +-- .../optimizer/PhysicalPlanOptimizerTests.java | 10 +++++++ .../xpack/ql/planner/QlTranslatorHandler.java | 8 ++---- 7 files changed, 37 insertions(+), 37 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index 8182b41df339f..e837858153cc1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -286,7 +286,7 @@ private boolean canPushDownOrders(List orders) { private List buildFieldSorts(List orders) { List sorts = new ArrayList<>(orders.size()); for (Order o : orders) { - sorts.add(new EsQueryExec.FieldSort(((FieldAttribute) o.child()), o.direction(), o.nullsPosition())); + sorts.add(new EsQueryExec.FieldSort(((FieldAttribute) o.child()).exactAttribute(), o.direction(), o.nullsPosition())); } return sorts; } @@ -296,6 +296,12 @@ private static final class EsqlTranslatorHandler extends QlTranslatorHandler { @Override public Query wrapFunctionQuery(ScalarFunction sf, Expression field, Supplier querySupplier) { if (field instanceof FieldAttribute fa) { + if (fa.getExactInfo().hasExact()) { + var exact = fa.exactAttribute(); + if (exact != fa) { + fa = exact; + } + } return ExpressionTranslator.wrapIfNested(new SingleValueQuery(querySupplier.get(), fa.name()), field); } if (field instanceof MetadataAttribute) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 2e14c33c701cb..086736403d590 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -29,7 +29,6 @@ import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.ExpressionSet; import org.elasticsearch.xpack.ql.expression.Expressions; -import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; @@ -87,12 +86,8 @@ protected List> batches() { } protected static List> rules() { - var substitutions = new Batch<>( - "Substitutions", - Limiter.ONCE, - new SubstituteSurrogates(), - new ReplaceRegexMatch(), - new ReplaceFieldAttributesWithExactSubfield() + var substitutions = new Batch<>("Substitutions", Limiter.ONCE, new SubstituteSurrogates(), new ReplaceRegexMatch() + // new ReplaceTextFieldAttributesWithTheKeywordSubfield() ); var operators = new Batch<>( @@ -855,22 +850,4 @@ protected Expression regexToEquals(RegexMatch regexMatch, Literal literal) { return new Equals(regexMatch.source(), regexMatch.field(), literal); } } - - private static class ReplaceFieldAttributesWithExactSubfield extends OptimizerRules.OptimizerRule { - - @Override - protected LogicalPlan rule(LogicalPlan plan) { - if (plan instanceof Filter || plan instanceof OrderBy || plan instanceof Aggregate) { - return plan.transformExpressionsOnly(FieldAttribute.class, ReplaceFieldAttributesWithExactSubfield::toExact); - } - return plan; - } - - private static FieldAttribute toExact(FieldAttribute fa) { - if (fa.getExactInfo().hasExact() && fa.exactAttribute() != fa) { - return fa.exactAttribute(); - } - return fa; - } - } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 81fe43ebade7f..86ad56c115b3a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.ql.expression.Alias; +import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.MetadataAttribute; @@ -80,7 +81,7 @@ record AggDef(Class aggClazz, String type, boolean grouping) {} } public List mapNonGrouping(List aggregates) { - return aggregates.stream().flatMap(agg -> map(agg, false)).toList(); + return doMapping(aggregates, false); } public List mapNonGrouping(Expression aggregate) { @@ -88,7 +89,13 @@ public List mapNonGrouping(Expression aggregate) { } public List mapGrouping(List aggregates) { - return aggregates.stream().flatMap(agg -> map(agg, true)).toList(); + return doMapping(aggregates, true); + } + + private List doMapping(List aggregates, boolean grouping) { + AttributeMap attrToExpressions = new AttributeMap<>(); + aggregates.stream().flatMap(agg -> map(agg, grouping)).forEach(ne -> attrToExpressions.put(ne.toAttribute(), ne)); + return attrToExpressions.values().stream().toList(); } public List mapGrouping(Expression aggregate) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java index a97a467aa3c0a..d9df22fca5d21 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java @@ -118,7 +118,11 @@ public Layout build() { for (ChannelSet set : channels) { int channel = numberOfChannels++; for (NameId id : set.nameIds) { - layout.putIfAbsent(id, new ChannelAndType(channel, set.type)); + ChannelAndType next = new ChannelAndType(channel, set.type); + ChannelAndType prev = layout.put(id, next); + if (prev != null) { + throw new IllegalArgumentException("Name [" + id + "] is on two channels [" + prev + "] and [" + next + "]"); + } } } return new DefaultLayout(Collections.unmodifiableMap(layout), numberOfChannels); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index bd1615ef9e5f2..8c8962b43433a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -1457,7 +1457,7 @@ public void testReplaceFilterWithExact() { var filter = as(limit.child(), Filter.class); Equals equals = as(filter.condition(), Equals.class); FieldAttribute left = as(equals.left(), FieldAttribute.class); - assertThat(left.name(), equalTo("job.raw")); + assertThat(left.name(), equalTo("job")); } public void testReplaceExpressionWithExact() { @@ -1481,7 +1481,7 @@ public void testReplaceSortWithExact() { var topN = as(plan, TopN.class); assertThat(topN.order().size(), equalTo(1)); var sortField = as(topN.order().get(0).child(), FieldAttribute.class); - assertThat(sortField.name(), equalTo("job.raw")); + assertThat(sortField.name(), equalTo("job")); } public void testPruneUnusedEval() { diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 87693bffc4433..746a34eaedce4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -1732,6 +1732,16 @@ public void testTextWithRawSortPushDown() { assertThat(source.sorts().get(0).field().name(), equalTo("job.raw")); } + public void testFieldExtractForTextAndSubfield() { + var plan = physicalPlan(""" + from test + | keep job* + """); + + var project = as(plan, ProjectExec.class); + assertThat(Expressions.names(project.projections()), contains("job", "job.raw")); + } + public void testFieldExtractWithoutSourceAttributes() { PhysicalPlan verifiedPlan = optimizedPlan(physicalPlan(""" from test diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/QlTranslatorHandler.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/QlTranslatorHandler.java index 04633b449bc45..939b019b320ae 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/QlTranslatorHandler.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/QlTranslatorHandler.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ql.planner; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.NamedExpression; +import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.querydsl.query.Query; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypeConverter; @@ -22,11 +22,7 @@ public Query asQuery(Expression e) { @Override public String nameOf(Expression e) { - if (e instanceof NamedExpression) { - return ((NamedExpression) 
e).name(); - } else { - return e.sourceText(); - } + return Expressions.name(e); } @Override From af756278e5ea7a6474751a87c883224545c9b136 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 13 Sep 2023 10:12:53 -0700 Subject: [PATCH 046/114] Remove uuids from TransportVersion constants (#99500) This commit renames a few of the most recent TransportVersion constants to have meaningful names. Going forward all constants should have meaningful names. Since the names will now be unique, the uuid is no longer needed to force merge conflicts, so they are removed. --- CONTRIBUTING.md | 20 +- .../org/elasticsearch/TransportVersions.java | 200 +++++++++--------- .../action/search/SearchResponse.java | 2 +- .../version/CompatibilityVersions.java | 4 +- .../common/geo/GeoBoundingBox.java | 2 +- .../common/io/stream/StreamOutput.java | 5 +- .../geo/GeoBoundsGenericWriteableTests.java | 4 +- .../StopTrainedModelDeploymentAction.java | 4 +- .../application/search/SearchApplication.java | 2 +- .../PinnedQueryBuilder.java | 2 +- .../spatial/common/CartesianBoundingBox.java | 2 +- 11 files changed, 120 insertions(+), 127 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fab62dd025ac4..4f9f432bca467 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -659,11 +659,11 @@ numbering scheme separate to release version. The main ones are `TransportVersion` and `IndexVersion`, representing the version of the inter-node binary protocol and index data + metadata respectively. -Separated version numbers are comprised of a simple incrementing number, -with no semantic versioning information. There is no direct mapping between -separated version numbers and the release version. The versions used by any -particular instance of Elasticsearch can be obtained by querying `/` -on the node. +Separated version numbers are comprised of an integer number. The semantic +meaing of a version number are defined within each `*Version` class. 
There +is no direct mapping between separated version numbers and the release version. +The versions used by any particular instance of Elasticsearch can be obtained +by querying `/_nodes/info` on the node. #### Using separated version numbers @@ -674,14 +674,8 @@ number, there are a few rules that need to be followed: and should not be modified once it is defined. Each version is immutable once merged into `main`. 2. To create a new component version, add a new constant to the respective class - using the preceding version number +1, modify the version id string to a new - unique string (normally a UUID), and set that constant as the new current - version. - -The version ID string in the constant definition is not used in the executing -code; it is there to ensure that if two concurrent pull requests add the same -version constant, there will be a git conflict on those lines. This is to ensure -two PRs don't accidentally use the same version constant. + with a descriptive name of the change being made. Increment the integer + number according to the partciular `*Version` class. 
If your pull request has a conflict around your new version constant, you need to update your PR from `main` and change your PR to use the next diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 0c16ee5cb068b..844931dee9eb4 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -8,13 +8,13 @@ package org.elasticsearch; -import org.elasticsearch.common.Strings; import org.elasticsearch.core.Assertions; import java.lang.reflect.Field; import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.NavigableMap; import java.util.Set; @@ -27,117 +27,115 @@ public class TransportVersions { * This map is used during class construction, referenced by the registerTransportVersion method. * When all the transport version constants have been registered, the map is cleared & never touched again. 
*/ - static Map IDS = new HashMap<>(); + static Set IDS = new HashSet<>(); - static TransportVersion def(int id, String uniqueId) { + static TransportVersion def(int id) { if (IDS == null) throw new IllegalStateException("The IDS map needs to be present to call this method"); - Strings.requireNonEmpty(uniqueId, "Each TransportVersion needs a unique string id"); - Integer existing = IDS.put(uniqueId, id); - if (existing != null) { - throw new IllegalArgumentException("Versions " + id + " and " + existing + " have the same unique id"); + if (IDS.add(id) == false) { + throw new IllegalArgumentException("Version id " + id + " defined twice"); } return new TransportVersion(id); } - public static final TransportVersion ZERO = def(0, "00000000-0000-0000-0000-000000000000"); - public static final TransportVersion V_7_0_0 = def(7_00_00_99, "7505fd05-d982-43ce-a63f-ff4c6c8bdeec"); - public static final TransportVersion V_7_0_1 = def(7_00_01_99, "ae772780-e6f9-46a1-b0a0-20ed0cae37f7"); - public static final TransportVersion V_7_1_0 = def(7_01_00_99, "fd09007c-1c54-450a-af99-9f941e1a53c2"); - public static final TransportVersion V_7_2_0 = def(7_02_00_99, "b74dbc52-e727-472c-af21-2156482e8796"); - public static final TransportVersion V_7_2_1 = def(7_02_01_99, "a3217b94-f436-4aab-a020-162c83ba18f2"); - public static final TransportVersion V_7_3_0 = def(7_03_00_99, "4f04e4c9-c5aa-49e4-8b99-abeb4e284a5a"); - public static final TransportVersion V_7_3_2 = def(7_03_02_99, "60da3953-8415-4d4f-a18d-853c3e68ebd6"); - public static final TransportVersion V_7_4_0 = def(7_04_00_99, "ec7e58aa-55b4-4064-a9dd-fd723a2ba7a8"); - public static final TransportVersion V_7_5_0 = def(7_05_00_99, "cc6e14dc-9dc7-4b74-8e15-1f99a6cfbe03"); - public static final TransportVersion V_7_6_0 = def(7_06_00_99, "4637b8ae-f3df-43ae-a065-ad4c29f3373a"); - public static final TransportVersion V_7_7_0 = def(7_07_00_99, "7bb73c48-ddb8-4437-b184-30371c35dd4b"); - public static final TransportVersion V_7_8_0 = 
def(7_08_00_99, "c3cc74af-d15e-494b-a907-6ad6dd2f4660"); - public static final TransportVersion V_7_8_1 = def(7_08_01_99, "7acb9f6e-32f2-45ce-b87d-ca1f165b8e7a"); - public static final TransportVersion V_7_9_0 = def(7_09_00_99, "9388fe76-192a-4053-b51c-d2a7b8eae545"); - public static final TransportVersion V_7_10_0 = def(7_10_00_99, "4efca195-38e4-4f74-b877-c26fb2a40733"); - public static final TransportVersion V_7_10_1 = def(7_10_01_99, "0070260c-aa0b-4fc2-9c87-5cd5f23b005f"); - public static final TransportVersion V_7_11_0 = def(7_11_00_99, "3b43bcbc-1c5e-4cc2-a3b4-8ac8b64239e8"); - public static final TransportVersion V_7_12_0 = def(7_12_00_99, "3be9ff6f-2d9f-4fc2-ba91-394dd5ebcf33"); - public static final TransportVersion V_7_13_0 = def(7_13_00_99, "e1fe494a-7c66-4571-8f8f-1d7e6d8df1b3"); - public static final TransportVersion V_7_14_0 = def(7_14_00_99, "8cf0954c-b085-467f-b20b-3cb4b2e69e3e"); - public static final TransportVersion V_7_15_0 = def(7_15_00_99, "2273ac0e-00bb-4024-9e2e-ab78981623c6"); - public static final TransportVersion V_7_15_1 = def(7_15_01_99, "a8c3503d-3452-45cf-b385-e855e16547fe"); - public static final TransportVersion V_7_16_0 = def(7_16_00_99, "59abadd2-25db-4547-a991-c92306a3934e"); - public static final TransportVersion V_7_17_0 = def(7_17_00_99, "322efe93-4c73-4e15-9274-bb76836c8fa8"); - public static final TransportVersion V_7_17_1 = def(7_17_01_99, "51c72842-7974-4669-ad25-bf13ba307307"); - public static final TransportVersion V_7_17_8 = def(7_17_08_99, "82a3e70d-cf0e-4efb-ad16-6077ab9fe19f"); - public static final TransportVersion V_8_0_0 = def(8_00_00_99, "c7d2372c-9f01-4a79-8b11-227d862dfe4f"); - public static final TransportVersion V_8_1_0 = def(8_01_00_99, "3dc49dce-9cef-492a-ac8d-3cc79f6b4280"); - public static final TransportVersion V_8_2_0 = def(8_02_00_99, "8ce6d555-202e-47db-ab7d-ade9dda1b7e8"); - public static final TransportVersion V_8_3_0 = def(8_03_00_99, "559ddb66-d857-4208-bed5-a995ccf478ea"); - public static final 
TransportVersion V_8_4_0 = def(8_04_00_99, "c0d12906-aa5b-45d4-94c7-cbcf4d9818ca"); - public static final TransportVersion V_8_5_0 = def(8_05_00_99, "be3d7f23-7240-4904-9d7f-e25a0f766eca"); - public static final TransportVersion V_8_6_0 = def(8_06_00_99, "e209c5ed-3488-4415-b561-33492ca3b789"); - public static final TransportVersion V_8_6_1 = def(8_06_01_99, "9f113acb-1b21-4fda-bef9-2a3e669b5c7b"); - public static final TransportVersion V_8_7_0 = def(8_07_00_99, "f1ee7a85-4fa6-43f5-8679-33e2b750448b"); - public static final TransportVersion V_8_7_1 = def(8_07_01_99, "018de9d8-9e8b-4ac7-8f4b-3a6fbd0487fb"); - public static final TransportVersion V_8_8_0 = def(8_08_00_99, "f64fe576-0767-4ec3-984e-3e30b33b6c46"); - public static final TransportVersion V_8_8_1 = def(8_08_01_99, "291c71bb-5b0a-4b7e-a407-6e53bc128d0f"); + public static final TransportVersion ZERO = def(0); + public static final TransportVersion V_7_0_0 = def(7_00_00_99); + public static final TransportVersion V_7_0_1 = def(7_00_01_99); + public static final TransportVersion V_7_1_0 = def(7_01_00_99); + public static final TransportVersion V_7_2_0 = def(7_02_00_99); + public static final TransportVersion V_7_2_1 = def(7_02_01_99); + public static final TransportVersion V_7_3_0 = def(7_03_00_99); + public static final TransportVersion V_7_3_2 = def(7_03_02_99); + public static final TransportVersion V_7_4_0 = def(7_04_00_99); + public static final TransportVersion V_7_5_0 = def(7_05_00_99); + public static final TransportVersion V_7_6_0 = def(7_06_00_99); + public static final TransportVersion V_7_7_0 = def(7_07_00_99); + public static final TransportVersion V_7_8_0 = def(7_08_00_99); + public static final TransportVersion V_7_8_1 = def(7_08_01_99); + public static final TransportVersion V_7_9_0 = def(7_09_00_99); + public static final TransportVersion V_7_10_0 = def(7_10_00_99); + public static final TransportVersion V_7_10_1 = def(7_10_01_99); + public static final TransportVersion V_7_11_0 = 
def(7_11_00_99); + public static final TransportVersion V_7_12_0 = def(7_12_00_99); + public static final TransportVersion V_7_13_0 = def(7_13_00_99); + public static final TransportVersion V_7_14_0 = def(7_14_00_99); + public static final TransportVersion V_7_15_0 = def(7_15_00_99); + public static final TransportVersion V_7_15_1 = def(7_15_01_99); + public static final TransportVersion V_7_16_0 = def(7_16_00_99); + public static final TransportVersion V_7_17_0 = def(7_17_00_99); + public static final TransportVersion V_7_17_1 = def(7_17_01_99); + public static final TransportVersion V_7_17_8 = def(7_17_08_99); + public static final TransportVersion V_8_0_0 = def(8_00_00_99); + public static final TransportVersion V_8_1_0 = def(8_01_00_99); + public static final TransportVersion V_8_2_0 = def(8_02_00_99); + public static final TransportVersion V_8_3_0 = def(8_03_00_99); + public static final TransportVersion V_8_4_0 = def(8_04_00_99); + public static final TransportVersion V_8_5_0 = def(8_05_00_99); + public static final TransportVersion V_8_6_0 = def(8_06_00_99); + public static final TransportVersion V_8_6_1 = def(8_06_01_99); + public static final TransportVersion V_8_7_0 = def(8_07_00_99); + public static final TransportVersion V_8_7_1 = def(8_07_01_99); + public static final TransportVersion V_8_8_0 = def(8_08_00_99); + public static final TransportVersion V_8_8_1 = def(8_08_01_99); /* * READ THE COMMENT BELOW THIS BLOCK OF DECLARATIONS BEFORE ADDING NEW TRANSPORT VERSIONS * Detached transport versions added below here. 
*/ - public static final TransportVersion V_8_500_020 = def(8_500_020, "ECB42C26-B258-42E5-A835-E31AF84A76DE"); - public static final TransportVersion V_8_500_021 = def(8_500_021, "102e0d84-0c08-402c-a696-935f3a3da873"); - public static final TransportVersion V_8_500_022 = def(8_500_022, "4993c724-7a81-4955-84e7-403484610091"); - public static final TransportVersion V_8_500_023 = def(8_500_023, "01b06435-5d73-42ff-a121-3b36b771375e"); - public static final TransportVersion V_8_500_024 = def(8_500_024, "db337007-f823-4dbd-968e-375383814c17"); - public static final TransportVersion V_8_500_025 = def(8_500_025, "b2ab7b75-5ac2-4a3b-bbb6-8789ca66722d"); - public static final TransportVersion V_8_500_026 = def(8_500_026, "965d294b-14aa-4abb-bcfc-34631187941d"); - public static final TransportVersion V_8_500_027 = def(8_500_027, "B151D967-8E7C-401C-8275-0ABC06335F2D"); - public static final TransportVersion V_8_500_028 = def(8_500_028, "a6592d08-15cb-4e1a-b9b4-b2ba24058444"); - public static final TransportVersion V_8_500_029 = def(8_500_029, "f3bd98af-6187-e161-e315-718a2fecc2db"); - public static final TransportVersion V_8_500_030 = def(8_500_030, "b72d7f12-8ed3-4a5b-8e6a-4910ea10e0d7"); - public static final TransportVersion V_8_500_031 = def(8_500_031, "e7aa7e95-37e7-46a3-aad1-90a21c0769e7"); - public static final TransportVersion V_8_500_032 = def(8_500_032, "a9a14bc6-c3f2-41d9-a3d8-c686bf2c901d"); - public static final TransportVersion V_8_500_033 = def(8_500_033, "193ab7c4-a751-4cbd-a66a-2d7d56ccbc10"); - public static final TransportVersion V_8_500_034 = def(8_500_034, "16871c8b-88ba-4432-980a-10fd9ecad2dc"); - public static final TransportVersion V_8_500_035 = def(8_500_035, "664dd6ce-3487-4fbd-81a9-af778b28be45"); - public static final TransportVersion V_8_500_036 = def(8_500_036, "3343c64f-d7ac-4f02-9262-3e1acfc56f89"); - public static final TransportVersion V_8_500_037 = def(8_500_037, "d76a4f22-8878-43e0-acfa-15e452195fa7"); - public static final 
TransportVersion V_8_500_038 = def(8_500_038, "9ef93580-feae-409f-9989-b49e411ca7a9"); - public static final TransportVersion V_8_500_039 = def(8_500_039, "c23722d7-6139-4cf2-b8a1-600fbd4ec359"); - public static final TransportVersion V_8_500_040 = def(8_500_040, "8F3AA068-A608-4A16-9683-2412A75BF2DD"); - public static final TransportVersion V_8_500_041 = def(8_500_041, "5b6a0fd0-ac0b-443f-baae-cffec140905c"); - public static final TransportVersion V_8_500_042 = def(8_500_042, "763b4801-a4fc-47c4-aff5-7f5a757b8a07"); - public static final TransportVersion V_8_500_043 = def(8_500_043, "50babd14-7f5c-4f8c-9351-94e0d397aabc"); - public static final TransportVersion V_8_500_044 = def(8_500_044, "96b83320-2317-4e9d-b735-356f18c1d76a"); - public static final TransportVersion V_8_500_045 = def(8_500_045, "24a596dd-c843-4c0a-90b3-759697d74026"); - public static final TransportVersion V_8_500_046 = def(8_500_046, "61666d4c-a4f0-40db-8a3d-4806718247c5"); - public static final TransportVersion V_8_500_047 = def(8_500_047, "4b1682fe-c37e-4184-80f6-7d57fcba9b3d"); - public static final TransportVersion V_8_500_048 = def(8_500_048, "f9658aa5-f066-4edb-bcb9-40bf256c9294"); - public static final TransportVersion V_8_500_049 = def(8_500_049, "828bb6ce-2fbb-11ee-be56-0242ac120002"); - public static final TransportVersion V_8_500_050 = def(8_500_050, "69722fa2-7c0a-4227-86fb-6d6a9a0a0321"); - public static final TransportVersion V_8_500_051 = def(8_500_051, "a28b43bc-bb5f-4406-afcf-26900aa98a71"); - public static final TransportVersion V_8_500_052 = def(8_500_052, "2d382b3d-9838-4cce-84c8-4142113e5c2b"); - public static final TransportVersion V_8_500_053 = def(8_500_053, "aa603bae-01e2-380a-8950-6604468e8c6d"); - public static final TransportVersion V_8_500_054 = def(8_500_054, "b76ef950-af03-4dda-85c2-6400ec442e7e"); - public static final TransportVersion V_8_500_055 = def(8_500_055, "7831c609-0df1-42d6-aa97-8a346c389ef"); - public static final TransportVersion V_8_500_056 = 
def(8_500_056, "afa8c4be-29c9-48ab-b1ed-7182415c1b71"); - public static final TransportVersion V_8_500_057 = def(8_500_057, "80c088c6-358d-43b2-8d9c-1ea3c6c2b9fd"); - public static final TransportVersion V_8_500_058 = def(8_500_058, "41d9c98a-1de2-4dc1-86f1-abd4cc1bef57"); - public static final TransportVersion V_8_500_059 = def(8_500_059, "2f2090c0-7cd0-4a10-8f02-63d26073604f"); - public static final TransportVersion V_8_500_060 = def(8_500_060, "ec065a44-b468-4f8a-aded-7b90ca8d792b"); - public static final TransportVersion V_8_500_061 = def(8_500_061, "4e07f830-8be4-448c-851e-62b3d2f0bf0a"); - public static final TransportVersion V_8_500_062 = def(8_500_062, "09CD9C9B-3207-4B40-8756-B7A12001A885"); - public static final TransportVersion V_8_500_063 = def(8_500_063, "31dedced-0055-4f34-b952-2f6919be7488"); - public static final TransportVersion V_8_500_064 = def(8_500_064, "3a795175-5e6f-40ff-90fe-5571ea8ab04e"); - public static final TransportVersion V_8_500_065 = def(8_500_065, "4e253c58-1b3d-11ee-be56-0242ac120002"); - public static final TransportVersion V_8_500_066 = def(8_500_066, "F398ECC6-5D2A-4BD8-A9E8-1101F030DF85"); - public static final TransportVersion V_8_500_067 = def(8_500_067, "a7c86604-a917-4aff-9a1b-a4d44c3dbe02"); - public static final TransportVersion V_8_500_068 = def(8_500_068, "2683c8b4-5372-4a6a-bb3a-d61aa679089a"); - public static final TransportVersion V_8_500_069 = def(8_500_069, "5b804027-d8a0-421b-9970-1f53d766854b"); - public static final TransportVersion V_8_500_070 = def(8_500_070, "6BADC9CD-3C9D-4381-8BD9-B305CAA93F86"); - public static final TransportVersion V_8_500_071 = def(8_500_071, "a86dfc08-3026-4f01-90ef-6d6de003e217"); - public static final TransportVersion V_8_500_072 = def(8_500_072, "e2df7d80-7b74-4afd-9734-aee0fc256025"); - public static final TransportVersion V_8_500_073 = def(8_500_073, "9128e16a-e4f7-41c4-b04f-842955bfc1b4"); - public static final TransportVersion V_8_500_074 = def(8_500_074, 
"aab0c31c-62d7-4b95-bb29-0b6f367ece64"); + public static final TransportVersion V_8_500_020 = def(8_500_020); + public static final TransportVersion V_8_500_021 = def(8_500_021); + public static final TransportVersion V_8_500_022 = def(8_500_022); + public static final TransportVersion V_8_500_023 = def(8_500_023); + public static final TransportVersion V_8_500_024 = def(8_500_024); + public static final TransportVersion V_8_500_025 = def(8_500_025); + public static final TransportVersion V_8_500_026 = def(8_500_026); + public static final TransportVersion V_8_500_027 = def(8_500_027); + public static final TransportVersion V_8_500_028 = def(8_500_028); + public static final TransportVersion V_8_500_029 = def(8_500_029); + public static final TransportVersion V_8_500_030 = def(8_500_030); + public static final TransportVersion V_8_500_031 = def(8_500_031); + public static final TransportVersion V_8_500_032 = def(8_500_032); + public static final TransportVersion V_8_500_033 = def(8_500_033); + public static final TransportVersion V_8_500_034 = def(8_500_034); + public static final TransportVersion V_8_500_035 = def(8_500_035); + public static final TransportVersion V_8_500_036 = def(8_500_036); + public static final TransportVersion V_8_500_037 = def(8_500_037); + public static final TransportVersion V_8_500_038 = def(8_500_038); + public static final TransportVersion V_8_500_039 = def(8_500_039); + public static final TransportVersion V_8_500_040 = def(8_500_040); + public static final TransportVersion V_8_500_041 = def(8_500_041); + public static final TransportVersion V_8_500_042 = def(8_500_042); + public static final TransportVersion V_8_500_043 = def(8_500_043); + public static final TransportVersion V_8_500_044 = def(8_500_044); + public static final TransportVersion V_8_500_045 = def(8_500_045); + public static final TransportVersion V_8_500_046 = def(8_500_046); + public static final TransportVersion V_8_500_047 = def(8_500_047); + public static final 
TransportVersion V_8_500_048 = def(8_500_048); + public static final TransportVersion V_8_500_049 = def(8_500_049); + public static final TransportVersion V_8_500_050 = def(8_500_050); + public static final TransportVersion V_8_500_051 = def(8_500_051); + public static final TransportVersion V_8_500_052 = def(8_500_052); + public static final TransportVersion V_8_500_053 = def(8_500_053); + public static final TransportVersion V_8_500_054 = def(8_500_054); + public static final TransportVersion V_8_500_055 = def(8_500_055); + public static final TransportVersion V_8_500_056 = def(8_500_056); + public static final TransportVersion V_8_500_057 = def(8_500_057); + public static final TransportVersion V_8_500_058 = def(8_500_058); + public static final TransportVersion V_8_500_059 = def(8_500_059); + public static final TransportVersion V_8_500_060 = def(8_500_060); + public static final TransportVersion V_8_500_061 = def(8_500_061); + public static final TransportVersion V_8_500_062 = def(8_500_062); + public static final TransportVersion V_8_500_063 = def(8_500_063); + public static final TransportVersion V_8_500_064 = def(8_500_064); + public static final TransportVersion V_8_500_065 = def(8_500_065); + public static final TransportVersion V_8_500_066 = def(8_500_066); + public static final TransportVersion SEARCH_RESP_SKIP_UNAVAILABLE_ADDED = def(8_500_067); + public static final TransportVersion ML_TRAINED_MODEL_FINISH_PENDING_WORK_ADDED = def(8_500_068); + public static final TransportVersion SEARCH_APP_INDICES_REMOVED = def(8_500_069); + public static final TransportVersion GENERIC_NAMED_WRITABLE_ADDED = def(8_500_070); + public static final TransportVersion PINNED_QUERY_OPTIONAL_INDEX = def(8_500_071); + public static final TransportVersion SHARD_SIZE_PRIMARY_TERM_GEN_ADDED = def(8_500_072); + public static final TransportVersion COMPAT_VERSIONS_MAPPING_VERSION_ADDED = def(8_500_073); + public static final TransportVersion V_8_500_074 = def(8_500_074); /* * 
STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index fd995d284ea69..278d1bbdf5207 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -990,7 +990,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalLong(took == null ? null : took.millis()); out.writeBoolean(timedOut); out.writeCollection(failures); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_067)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.SEARCH_RESP_SKIP_UNAVAILABLE_ADDED)) { out.writeBoolean(skipUnavailable); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java b/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java index a9c5298a4325e..2d3be237c8e94 100644 --- a/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java +++ b/server/src/main/java/org/elasticsearch/cluster/version/CompatibilityVersions.java @@ -101,7 +101,7 @@ public static CompatibilityVersions readVersion(StreamInput in) throws IOExcepti TransportVersion transportVersion = TransportVersion.readVersion(in); Map mappingsVersions = Map.of(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_073)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.COMPAT_VERSIONS_MAPPING_VERSION_ADDED)) { mappingsVersions = in.readMap(SystemIndexDescriptor.MappingsVersion::new); } @@ -112,7 +112,7 @@ public static CompatibilityVersions readVersion(StreamInput in) throws IOExcepti public void writeTo(StreamOutput out) throws IOException { TransportVersion.writeVersion(this.transportVersion(), out); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_073)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.COMPAT_VERSIONS_MAPPING_VERSION_ADDED)) { out.writeMap(this.systemIndexMappingsVersion(), (o, v) -> v.writeTo(o)); } } diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoBoundingBox.java b/server/src/main/java/org/elasticsearch/common/geo/GeoBoundingBox.java index 11ba237a11145..7b354bfa767f2 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoBoundingBox.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoBoundingBox.java @@ -100,7 +100,7 @@ public final String getWriteableName() { @Override public final TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_070; + return TransportVersions.GENERIC_NAMED_WRITABLE_ADDED; } protected static class GeoBoundsParser extends BoundsParser { diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index afe94da22d196..693d8efb18347 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -49,7 +49,7 @@ import java.util.function.IntFunction; import static java.util.Map.entry; -import static org.elasticsearch.TransportVersions.V_8_500_070; +import static org.elasticsearch.TransportVersions.GENERIC_NAMED_WRITABLE_ADDED; /** * A stream from another node to this node. Technically, it can also be streamed from a byte array but that is mostly for testing. 
@@ -768,7 +768,8 @@ public final void writeOptionalInstant(@Nullable Instant instant) throws IOExcep // Note that we do not rely on the checks in VersionCheckingStreamOutput because that only applies to CCS final var genericNamedWriteable = (GenericNamedWriteable) v; TransportVersion minSupportedVersion = genericNamedWriteable.getMinimalSupportedVersion(); - assert minSupportedVersion.onOrAfter(V_8_500_070) : "[GenericNamedWriteable] requires [" + V_8_500_070 + "]"; + assert minSupportedVersion.onOrAfter(GENERIC_NAMED_WRITABLE_ADDED) + : "[GenericNamedWriteable] requires [" + GENERIC_NAMED_WRITABLE_ADDED + "]"; if (o.getTransportVersion().before(minSupportedVersion)) { final var message = Strings.format( "[%s] requires minimal transport version [%s] and cannot be sent using transport version [%s]", diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoBoundsGenericWriteableTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoBoundsGenericWriteableTests.java index 500903c437d91..9d02b7b5f4dcd 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoBoundsGenericWriteableTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoBoundsGenericWriteableTests.java @@ -77,8 +77,8 @@ protected GenericWriteableWrapper copyInstance(GenericWriteableWrapper instance, } public void testSerializationFailsWithOlderVersion() { - TransportVersion older = TransportVersions.V_8_500_069; - assert older.before(TransportVersions.V_8_500_070); + TransportVersion older = TransportVersions.SEARCH_APP_INDICES_REMOVED; + assert older.before(TransportVersions.GENERIC_NAMED_WRITABLE_ADDED); final var testInstance = createTestInstance().geoBoundingBox(); try (var output = new BytesStreamOutput()) { output.setTransportVersion(older); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopTrainedModelDeploymentAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopTrainedModelDeploymentAction.java index 5f62a48e761da..5fc3776e7013c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopTrainedModelDeploymentAction.java @@ -79,7 +79,7 @@ public Request(StreamInput in) throws IOException { allowNoMatch = in.readBoolean(); force = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_068)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_FINISH_PENDING_WORK_ADDED)) { finishPendingWork = in.readBoolean(); } else { finishPendingWork = false; @@ -132,7 +132,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(allowNoMatch); out.writeBoolean(force); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_068)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_TRAINED_MODEL_FINISH_PENDING_WORK_ADDED)) { out.writeBoolean(finishPendingWork); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplication.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplication.java index 1425f18eaa6cb..bd1fd33f4e963 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplication.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplication.java @@ -55,7 +55,7 @@ public class SearchApplication implements Writeable, ToXContentObject { + "We recommend storing a template to avoid breaking changes."; public static final String NO_ALIAS_WARNING = "Alias is missing for the search application"; - private static final TransportVersion INDICES_REMOVED_TRANSPORT_VERSION = TransportVersions.V_8_500_069; + private static final 
TransportVersion INDICES_REMOVED_TRANSPORT_VERSION = TransportVersions.SEARCH_APP_INDICES_REMOVED; private final String name; @Nullable diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java index 1db5d020fc0ee..615261a302877 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java @@ -57,7 +57,7 @@ public class PinnedQueryBuilder extends AbstractQueryBuilder public static final ParseField DOCS_FIELD = new ParseField("docs"); public static final ParseField ORGANIC_QUERY_FIELD = new ParseField("organic"); - private static final TransportVersion OPTIONAL_INDEX_IN_DOCS_VERSION = TransportVersions.V_8_500_071; + private static final TransportVersion OPTIONAL_INDEX_IN_DOCS_VERSION = TransportVersions.PINNED_QUERY_OPTIONAL_INDEX; private final List ids; private final List docs; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianBoundingBox.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianBoundingBox.java index 7bc2f8d06c945..53d1144069723 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianBoundingBox.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianBoundingBox.java @@ -65,7 +65,7 @@ public final String getWriteableName() { @Override public final TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_070; + return TransportVersions.GENERIC_NAMED_WRITABLE_ADDED; } protected static class CartesianBoundsParser extends BoundsParser { From cb380afb03663f940af55d5957cda3246c96b9f9 Mon 
Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 13 Sep 2023 12:19:40 -0500 Subject: [PATCH 047/114] APM module-info (#99548) Add module-info.java for APM. This allows it to be excluded in other builds. --- modules/apm/src/main/java/module-info.java | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 modules/apm/src/main/java/module-info.java diff --git a/modules/apm/src/main/java/module-info.java b/modules/apm/src/main/java/module-info.java new file mode 100644 index 0000000000000..d99245304edf1 --- /dev/null +++ b/modules/apm/src/main/java/module-info.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +module org.elasticsearch.tracing.apm { + requires org.elasticsearch.base; + requires org.elasticsearch.server; + requires org.elasticsearch.xcontent; + requires org.apache.logging.log4j; + requires org.apache.lucene.core; + requires io.opentelemetry.context; + + exports org.elasticsearch.tracing.apm; +} From 5d8636af0dd2d0208a718501850d2f572cad2398 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Wed, 13 Sep 2023 19:05:21 +0100 Subject: [PATCH 048/114] ESQL: Make Blocks and Vectors Releasable (#99523) This commit makes Blocks and Vectors Releasable. We moving to a model where operators will release blocks that are unused as pages are processed. This allows to release things like BytesRefArrays, bigArrays, etc. As well as eventually to perform circuit breaker accountancy, when a block is no longer needed. 
--- .../elasticsearch/compute/data/BooleanArrayBlock.java | 5 +++++ .../elasticsearch/compute/data/BooleanArrayVector.java | 5 +++++ .../elasticsearch/compute/data/BooleanVectorBlock.java | 6 ++++++ .../elasticsearch/compute/data/BytesRefArrayBlock.java | 5 +++++ .../compute/data/BytesRefArrayVector.java | 6 ++++++ .../compute/data/BytesRefVectorBlock.java | 6 ++++++ .../compute/data/ConstantBooleanVector.java | 5 +++++ .../compute/data/ConstantBytesRefVector.java | 5 +++++ .../compute/data/ConstantDoubleVector.java | 5 +++++ .../elasticsearch/compute/data/ConstantIntVector.java | 5 +++++ .../elasticsearch/compute/data/ConstantLongVector.java | 5 +++++ .../elasticsearch/compute/data/DoubleArrayBlock.java | 5 +++++ .../elasticsearch/compute/data/DoubleArrayVector.java | 5 +++++ .../elasticsearch/compute/data/DoubleVectorBlock.java | 6 ++++++ .../elasticsearch/compute/data/FilterBooleanBlock.java | 6 ++++++ .../compute/data/FilterBooleanVector.java | 6 ++++++ .../compute/data/FilterBytesRefBlock.java | 6 ++++++ .../compute/data/FilterBytesRefVector.java | 6 ++++++ .../elasticsearch/compute/data/FilterDoubleBlock.java | 6 ++++++ .../elasticsearch/compute/data/FilterDoubleVector.java | 6 ++++++ .../org/elasticsearch/compute/data/FilterIntBlock.java | 6 ++++++ .../elasticsearch/compute/data/FilterIntVector.java | 6 ++++++ .../elasticsearch/compute/data/FilterLongBlock.java | 6 ++++++ .../elasticsearch/compute/data/FilterLongVector.java | 6 ++++++ .../org/elasticsearch/compute/data/IntArrayBlock.java | 5 +++++ .../org/elasticsearch/compute/data/IntArrayVector.java | 5 +++++ .../org/elasticsearch/compute/data/IntVectorBlock.java | 6 ++++++ .../org/elasticsearch/compute/data/LongArrayBlock.java | 5 +++++ .../elasticsearch/compute/data/LongArrayVector.java | 5 +++++ .../elasticsearch/compute/data/LongVectorBlock.java | 6 ++++++ .../java/org/elasticsearch/compute/data/Block.java | 3 ++- .../elasticsearch/compute/data/ConstantNullBlock.java | 5 +++++ 
.../java/org/elasticsearch/compute/data/DocBlock.java | 6 ++++++ .../java/org/elasticsearch/compute/data/DocVector.java | 6 ++++++ .../java/org/elasticsearch/compute/data/Vector.java | 3 ++- .../elasticsearch/compute/data/X-ArrayBlock.java.st | 5 +++++ .../elasticsearch/compute/data/X-ArrayVector.java.st | 10 ++++++++++ .../compute/data/X-ConstantVector.java.st | 5 +++++ .../elasticsearch/compute/data/X-FilterBlock.java.st | 6 ++++++ .../elasticsearch/compute/data/X-FilterVector.java.st | 6 ++++++ .../elasticsearch/compute/data/X-VectorBlock.java.st | 6 ++++++ 41 files changed, 225 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index dbd3580f90db8..53986bf693122 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -95,4 +95,9 @@ public String toString() { + Arrays.toString(values) + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index 832b8f9f817bd..be89563e1faf3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -77,4 +77,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; } + + @Override + public void close() { + // no-op + } } diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 4049aec5d9746..6c138b8f56d11 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Block view of a BooleanVector. @@ -71,4 +72,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 38fba2f742bf3..47c75862c6370 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -96,4 +96,9 @@ public String toString() { + values.size() + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 42c92aa3be136..21422598a0bde 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ 
-10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.Releasables; /** * Vector implementation that stores an array of BytesRef values. @@ -77,4 +78,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ']'; } + + @Override + public void close() { + Releasables.closeExpectNoException(values); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 2b668ff34fe79..791ea6809de63 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Block view of a BytesRefVector. 
@@ -72,4 +73,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index 3d6abc55d9469..cae795a614732 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -70,4 +70,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index 896ac52bf0bc0..09b8bda0e38ce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -71,4 +71,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index 6099864b5b45b..b4aa5012ce2a0 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -70,4 +70,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index ab4e063c2ed78..a1ccf781b18f3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -70,4 +70,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index c47c48182f1d1..38672e5df9b04 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -70,4 +70,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index 0fd7aa987c315..6a81b10a3b107 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -95,4 +95,9 @@ public String toString() { + Arrays.toString(values) + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 2d2052371ed78..6f82d60ae1421 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -77,4 +77,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index 168cdc45167f6..92243b6a53b70 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Block view of a DoubleVector. 
@@ -71,4 +72,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanBlock.java index ed499489b3bb6..8b410fffc032b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanBlock.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter block for BooleanBlocks. @@ -122,4 +123,9 @@ private void appendValues(StringBuilder sb) { sb.append(']'); } } + + @Override + public void close() { + Releasables.closeExpectNoException(block); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanVector.java index c519bc55dabd8..2ff1dcd58dea5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanVector.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter vector for BooleanVectors. 
@@ -88,4 +89,9 @@ private void appendValues(StringBuilder sb) { sb.append(getBoolean(i)); } } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefBlock.java index ad2266441fad7..cdc925c01f53e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefBlock.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter block for BytesRefBlocks. @@ -125,4 +126,9 @@ private void appendValues(StringBuilder sb) { sb.append(']'); } } + + @Override + public void close() { + Releasables.closeExpectNoException(block); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefVector.java index 3395621af9ccc..0d51121d3c0ec 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefVector.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter vector for BytesRefVectors. 
@@ -89,4 +90,9 @@ private void appendValues(StringBuilder sb) { sb.append(getBytesRef(i, new BytesRef())); } } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleBlock.java index 51136e2c8def7..2d02a8665b87f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleBlock.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter block for DoubleBlocks. @@ -122,4 +123,9 @@ private void appendValues(StringBuilder sb) { sb.append(']'); } } + + @Override + public void close() { + Releasables.closeExpectNoException(block); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleVector.java index 08ce7cefcd48a..1c1c6d1c3db02 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleVector.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter vector for DoubleVectors. 
@@ -88,4 +89,9 @@ private void appendValues(StringBuilder sb) { sb.append(getDouble(i)); } } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntBlock.java index b915e40ab2d05..ffd72ec10d234 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntBlock.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter block for IntBlocks. @@ -122,4 +123,9 @@ private void appendValues(StringBuilder sb) { sb.append(']'); } } + + @Override + public void close() { + Releasables.closeExpectNoException(block); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntVector.java index c4954318f0a99..f0833c1878b61 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntVector.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter vector for IntVectors. 
@@ -88,4 +89,9 @@ private void appendValues(StringBuilder sb) { sb.append(getInt(i)); } } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongBlock.java index 7461e5cbb0dc1..85f72b1b0b44e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongBlock.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter block for LongBlocks. @@ -122,4 +123,9 @@ private void appendValues(StringBuilder sb) { sb.append(']'); } } + + @Override + public void close() { + Releasables.closeExpectNoException(block); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongVector.java index 12dce9350e080..5eb987863aa80 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongVector.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter vector for LongVectors. 
@@ -88,4 +89,9 @@ private void appendValues(StringBuilder sb) { sb.append(getLong(i)); } } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index bc7f4fefdefb5..8df36ff95117b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -95,4 +95,9 @@ public String toString() { + Arrays.toString(values) + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index a614f21c70b1d..d8e15408d4492 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -77,4 +77,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 0d6d2e21bf36e..20499fe900558 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -8,6 +8,7 @@ package 
org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Block view of a IntVector. @@ -71,4 +72,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index f973539e08009..9a1681a97a27c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -95,4 +95,9 @@ public String toString() { + Arrays.toString(values) + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index dafe44b22415e..a9d7cdfb40bf8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -77,4 +77,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index 330c10d6927fc..cf8fc931f1351 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Block view of a LongVector. @@ -71,4 +72,9 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index f948b647d9c45..d80119ad57fae 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.Accountable; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.core.Releasable; import java.util.List; @@ -31,7 +32,7 @@ * *

Block are immutable and can be passed between threads. */ -public interface Block extends Accountable, NamedWriteable { +public interface Block extends Accountable, NamedWriteable, Releasable { /** * {@return an efficient dense single-value view of this block}. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index f3d26d443d2fa..5a5ed16738810 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -119,6 +119,11 @@ public String toString() { return "ConstantNullBlock[positions=" + getPositionCount() + "]"; } + @Override + public void close() { + // no-op + } + static class Builder implements Block.Builder { private int positionCount; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index 364a8f413ef0f..8f2c2474d7f63 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -56,6 +57,11 @@ public long ramBytesUsed() { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(vector); } + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } + /** * A builder the for {@link DocBlock}. 
*/ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 5227609ec71ee..cecae256e8974 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.IntroSorter; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * {@link Vector} where each entry references a lucene document. @@ -198,4 +199,9 @@ public static long ramBytesEstimated( public long ramBytesUsed() { return ramBytesEstimated(shards, segments, docs, shardSegmentDocMapForwards, shardSegmentDocMapBackwards); } + + @Override + public void close() { + Releasables.closeExpectNoException(shards, segments, docs); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 06997090ddbb4..e8636e2a39970 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -8,11 +8,12 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.elasticsearch.core.Releasable; /** * A dense Vector of single values. 
*/ -public interface Vector extends Accountable { +public interface Vector extends Accountable, Releasable { /** * {@return Returns a Block view over this vector.} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 83a91a751e1b6..b0fbea3feccd4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -121,4 +121,9 @@ $else$ $endif$ + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 8b71ea69ab058..103e8bc22d9ed 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -11,6 +11,7 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.Releasables; $else$ import org.apache.lucene.util.RamUsageEstimator; @@ -103,6 +104,15 @@ $if(BytesRef)$ return getClass().getSimpleName() + "[positions=" + getPositionCount() + ']'; $else$ return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; +$endif$ + } + + @Override + public void close() { +$if(BytesRef)$ + Releasables.closeExpectNoException(values); +$else$ + // no-op $endif$ } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index 75a77da220435..8f6c911dc4ebb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -77,4 +77,9 @@ $endif$ public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } + + @Override + public void close() { + // no-op + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterBlock.java.st index 3dfaf02dc7c99..a4c524422533d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterBlock.java.st @@ -11,6 +11,7 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter block for $Type$Blocks. 
@@ -146,4 +147,9 @@ $endif$ sb.append(']'); } } + + @Override + public void close() { + Releasables.closeExpectNoException(block); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterVector.java.st index bf7c7b399aa76..e68fc6838d3f4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterVector.java.st @@ -11,6 +11,7 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Filter vector for $Type$Vectors. @@ -100,4 +101,9 @@ $else$ $endif$ } } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index c6c8df46ba4e8..3abc702839118 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -11,6 +11,7 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; /** * Block view of a $Type$Vector. 
@@ -79,4 +80,9 @@ $endif$ public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } + + @Override + public void close() { + Releasables.closeExpectNoException(vector); + } } From 7be3d2c191a24e6ffac3cbee8d96f0829a614930 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 13 Sep 2023 11:18:40 -0700 Subject: [PATCH 049/114] Increase flexibility of test cluster execution environments (#99437) --- .../internal/InternalTestArtifactPlugin.java | 7 +- .../test/rest/RestTestBasePlugin.java | 3 + modules/build.gradle | 1 + qa/full-cluster-restart/build.gradle | 6 +- rest-api-spec/build.gradle | 3 +- .../test/rest/ESRestTestCase.java | 9 + .../test/cluster/ClusterHandle.java | 81 ----- .../test/cluster/ElasticsearchCluster.java | 23 +- .../local/AbstractLocalClusterFactory.java | 38 +- .../AbstractLocalClusterSpecBuilder.java | 3 +- .../local/AbstractLocalSpecBuilder.java | 11 - .../local/DefaultLocalClusterFactory.java | 29 ++ .../local/DefaultLocalClusterHandle.java | 265 ++++++++++++++ .../local/DefaultLocalClusterSpecBuilder.java | 7 +- .../DefaultLocalElasticsearchCluster.java} | 10 +- .../cluster/local/LocalClusterFactory.java | 20 +- .../cluster/local/LocalClusterHandle.java | 328 +++++------------- .../test/cluster/local/LocalClusterSpec.java | 12 +- .../test/cluster/local/LocalSpecBuilder.java | 6 - x-pack/plugin/eql/qa/rest/build.gradle | 1 + .../elasticsearch/xpack/eql/EqlRestIT.java | 9 - .../xpack/eql/EqlTestCluster.java | 1 - .../xpack/eql/EqlClientYamlIT.java | 1 - .../security/qa/service-account/build.gradle | 4 - 24 files changed, 458 insertions(+), 420 deletions(-) create mode 100644 test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterFactory.java create mode 100644 test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java rename test/test-clusters/src/main/java/org/elasticsearch/test/cluster/{DefaultElasticsearchCluster.java => 
local/DefaultLocalElasticsearchCluster.java} (88%) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestArtifactPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestArtifactPlugin.java index 2f45c6ba69b43..53cd6000e82c1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestArtifactPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestArtifactPlugin.java @@ -23,7 +23,10 @@ public class InternalTestArtifactPlugin implements Plugin { public void apply(Project project) { project.getPlugins().apply(InternalTestArtifactBasePlugin.class); InternalTestArtifactExtension testArtifactExtension = project.getExtensions().getByType(InternalTestArtifactExtension.class); - SourceSet testSourceSet = project.getExtensions().getByType(SourceSetContainer.class).getByName("test"); - testArtifactExtension.registerTestArtifactFromSourceSet(testSourceSet); + project.getExtensions().getByType(SourceSetContainer.class).all(sourceSet -> { + if (sourceSet.getName().equals(SourceSet.MAIN_SOURCE_SET_NAME) == false) { + testArtifactExtension.registerTestArtifactFromSourceSet(sourceSet); + } + }); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 3fa67f2067229..1ff6e2f505436 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -37,6 +37,7 @@ import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.ProjectDependency; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; +import org.gradle.api.attributes.Attribute; import org.gradle.api.file.FileTree; import 
org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.ClasspathNormalizer; @@ -69,6 +70,7 @@ public class RestTestBasePlugin implements Plugin { private static final String MODULES_CONFIGURATION = "clusterModules"; private static final String PLUGINS_CONFIGURATION = "clusterPlugins"; private static final String EXTRACTED_PLUGINS_CONFIGURATION = "extractedPlugins"; + private static final Attribute CONFIGURATION_ATTRIBUTE = Attribute.of("test-cluster-artifacts", String.class); private final ProviderFactory providerFactory; @@ -249,6 +251,7 @@ private Optional findModulePath(Project project, String pluginName) { private Configuration createPluginConfiguration(Project project, String name, boolean useExploded, boolean isExtended) { return project.getConfigurations().create(name, c -> { + c.attributes(a -> a.attribute(CONFIGURATION_ATTRIBUTE, name)); if (useExploded) { c.attributes(a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE)); } else { diff --git a/modules/build.gradle b/modules/build.gradle index 44b5ccd7c2dcc..ad7049a9905f0 100644 --- a/modules/build.gradle +++ b/modules/build.gradle @@ -10,6 +10,7 @@ configure(subprojects.findAll { it.parent.path == project.path }) { group = 'org.elasticsearch.plugin' // for modules which publish client jars apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.internal-es-plugin' + apply plugin: 'elasticsearch.internal-test-artifact' esplugin { // for local ES plugins, the name of the plugin is the same as the directory diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index b6f181809e0e4..06442b7cfe6a6 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -10,13 +10,9 @@ import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' 
-apply plugin: 'elasticsearch.internal-test-artifact-base' +apply plugin: 'elasticsearch.internal-test-artifact' apply plugin: 'elasticsearch.bwc-test' -testArtifacts { - registerTestArtifactFromSourceSet(sourceSets.javaRestTest) -} - BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 70be9f92cc92c..d9c0ab5294906 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -1,11 +1,10 @@ -import org.elasticsearch.gradle.Version - apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.rest-resources' apply plugin: 'elasticsearch.validate-rest-spec' apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-test-artifact' restResources { restTests { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 5f8dfb87dbb0c..3041e918bc5a6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1294,6 +1294,15 @@ protected Settings restClientSettings() { if (System.getProperty("tests.rest.client_path_prefix") != null) { builder.put(CLIENT_PATH_PREFIX, System.getProperty("tests.rest.client_path_prefix")); } + if (System.getProperty("tests.rest.cluster.username") != null) { + if (System.getProperty("tests.rest.cluster.password") == null) { + throw new IllegalStateException("The 'test.rest.cluster.password' system property must be set."); + } + String username = System.getProperty("tests.rest.cluster.username"); + String password = System.getProperty("tests.rest.cluster.password"); + String token = 
basicAuthHeaderValue(username, new SecureString(password.toCharArray())); + return builder.put(ThreadContext.PREFIX + ".Authorization", token).build(); + } return builder.build(); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterHandle.java index 6e921952bab7b..76635c1a30e7c 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterHandle.java @@ -8,10 +8,7 @@ package org.elasticsearch.test.cluster; -import org.elasticsearch.test.cluster.util.Version; - import java.io.Closeable; -import java.io.InputStream; /** * A handle to an {@link ElasticsearchCluster}. @@ -30,19 +27,6 @@ public interface ClusterHandle extends Closeable { */ void stop(boolean forcibly); - /** - * Stops the node at a given index. - * @param index of the node to stop - */ - void stopNode(int index, boolean forcibly); - - /** - * Restarts the cluster. Effectively the same as calling {@link #stop(boolean)} followed by {@link #start()} - * - * @param forcibly whether to ficibly terminate the cluster - */ - void restart(boolean forcibly); - /** * Whether the cluster is started or not. This method makes no guarantees on cluster availability, only that the node processes have * been started. @@ -67,74 +51,9 @@ public interface ClusterHandle extends Closeable { */ String getHttpAddress(int index); - /** - * Get the name of the node for the given index. - */ - String getName(int index); - - /** - * Get the pid of the node for the given index. - */ - long getPid(int index); - - /** - * Returns a comma-separated list of TCP transport endpoints for cluster. If this method is called on an unstarted cluster, the cluster - * will be started. 
This method is thread-safe and subsequent calls will wait for cluster start and availability.\ - * - * @return cluster node TCP transport endpoints - */ - String getTransportEndpoints(); - - /** - * Returns the TCP transport endpoint for the node at the given index. If this method is called on an unstarted cluster, the cluster - * will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability. - * - * @return cluster node TCP transport endpoints - */ - String getTransportEndpoint(int index); - - /** - * Returns a comma-separated list of remote cluster server endpoints for cluster. If this method is called on an unstarted cluster, - * the cluster will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability. - * Note individual node can enable or disable remote cluster server independently. When a node has remote cluster server disabled, - * an empty string is returned for that node. Hence, it is possible for this method to return something like "[::1]:63300,,". - * - * @return cluster node remote cluster server endpoints - */ - String getRemoteClusterServerEndpoints(); - - /** - * Returns the remote cluster server endpoint for the node at the given index. If this method is called on an unstarted cluster, - * the cluster will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability. - * Note individual node can enable or disable remote cluster server independently. When a node has remote cluster server disabled, - * an empty string is returned. - * - * @return cluster node remote cluster server endpoints - */ - String getRemoteClusterServerEndpoint(int index); - - /** - * Upgrades a single node to the given version. Method blocks until the node is back up and ready to respond to requests. 
- * - * @param index index of node ot upgrade - * @param version version to upgrade to - */ - void upgradeNodeToVersion(int index, Version version); - - /** - * Performs a "full cluster restart" upgrade to the given version. Method blocks until the cluster is restarted and available. - * - * @param version version to upgrade to - */ - void upgradeToVersion(Version version); - /** * Cleans up any resources created by this cluster. Calling this method will forcibly terminate any running nodes. */ void close(); - /** - * Returns an {@link InputStream} for the given node log. - */ - InputStream getNodeLog(int index, LogType logType); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ElasticsearchCluster.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ElasticsearchCluster.java index 6ff771b9ca1e6..21dbf838f4214 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ElasticsearchCluster.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ElasticsearchCluster.java @@ -9,9 +9,14 @@ package org.elasticsearch.test.cluster; import org.elasticsearch.test.cluster.local.DefaultLocalClusterSpecBuilder; +import org.elasticsearch.test.cluster.local.LocalClusterHandle; import org.elasticsearch.test.cluster.local.LocalClusterSpecBuilder; import org.junit.rules.TestRule; +import java.util.List; +import java.util.ServiceLoader; +import java.util.stream.Collectors; + /** *

A JUnit test rule for orchestrating an Elasticsearch cluster for local integration testing. New clusters can be created via one of the * various static builder methods. For example:

@@ -20,7 +25,7 @@ * public static ElasticsearchCluster myCluster = ElasticsearchCluster.local().build(); * */ -public interface ElasticsearchCluster extends TestRule, ClusterHandle { +public interface ElasticsearchCluster extends TestRule, LocalClusterHandle { /** * Creates a new {@link LocalClusterSpecBuilder} for defining a locally orchestrated cluster. Local clusters use a locally built @@ -29,7 +34,21 @@ public interface ElasticsearchCluster extends TestRule, ClusterHandle { * @return a builder for a local cluster */ static LocalClusterSpecBuilder local() { - return new DefaultLocalClusterSpecBuilder(); + return locateBuilderImpl(); } + @SuppressWarnings({ "unchecked", "rawtypes" }) + private static LocalClusterSpecBuilder locateBuilderImpl() { + ServiceLoader loader = ServiceLoader.load(LocalClusterSpecBuilder.class); + List> providers = loader.stream().toList(); + + if (providers.isEmpty()) { + return new DefaultLocalClusterSpecBuilder(); + } else if (providers.size() > 1) { + String providerTypes = providers.stream().map(p -> p.type().getName()).collect(Collectors.joining(",")); + throw new IllegalStateException("Located multiple LocalClusterSpecBuilder providers [" + providerTypes + "]"); + } + + return providers.get(0).get(); + } } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java index c897a0026c6c7..261b72efe77e3 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.test.cluster.ClusterFactory; import org.elasticsearch.test.cluster.LogType; import 
org.elasticsearch.test.cluster.local.LocalClusterSpec.LocalNodeSpec; import org.elasticsearch.test.cluster.local.distribution.DistributionDescriptor; @@ -63,7 +62,7 @@ public abstract class AbstractLocalClusterFactory implements - ClusterFactory { + LocalClusterFactory { private static final Logger LOGGER = LogManager.getLogger(AbstractLocalClusterFactory.class); private static final Duration NODE_UP_TIMEOUT = Duration.ofMinutes(2); private static final Map, DistributionDescriptor> TEST_DISTRIBUTIONS = new ConcurrentHashMap<>(); @@ -106,6 +105,7 @@ public static class Node { private final Path logsDir; private final Path configDir; private final Path tempDir; + private final boolean usesSecureSecretsFile; private Path distributionDir; private Version currentVersion; @@ -113,10 +113,17 @@ public static class Node { private DistributionDescriptor distributionDescriptor; public Node(Path baseWorkingDir, DistributionResolver distributionResolver, LocalNodeSpec spec) { - this(baseWorkingDir, distributionResolver, spec, null); + this(baseWorkingDir, distributionResolver, spec, null, false); } - public Node(Path baseWorkingDir, DistributionResolver distributionResolver, LocalNodeSpec spec, String suffix) { + public Node( + Path baseWorkingDir, + DistributionResolver distributionResolver, + LocalNodeSpec spec, + String suffix, + boolean usesSecureSecretsFile + ) { + this.usesSecureSecretsFile = usesSecureSecretsFile; this.objectMapper = new ObjectMapper(); this.baseWorkingDir = baseWorkingDir; this.distributionResolver = distributionResolver; @@ -155,10 +162,13 @@ public synchronized void start(Version version) { } writeConfiguration(); - createKeystore(); - addKeystoreSettings(); - addKeystoreFiles(); - writeSecureSecretsFile(); + if (usesSecureSecretsFile) { + writeSecureSecretsFile(); + } else { + createKeystore(); + addKeystoreSettings(); + addKeystoreFiles(); + } configureSecurity(); startElasticsearch(); @@ -476,12 +486,20 @@ private void addKeystoreFiles() { } 
private void writeSecureSecretsFile() { - if (spec.getSecrets().isEmpty() == false) { + if (spec.getKeystoreFiles().isEmpty() == false) { + throw new IllegalStateException( + "Non-string secure secrets are not supported in serverless. Secrets: [" + + spec.getKeystoreFiles().keySet().stream().collect(Collectors.joining(",")) + + "]" + ); + } + Map secrets = spec.resolveKeystore(); + if (secrets.isEmpty() == false) { try { Path secretsFile = configDir.resolve("secrets/secrets.json"); Files.createDirectories(secretsFile.getParent()); Map secretsFileContent = new HashMap<>(); - secretsFileContent.put("secrets", spec.getSecrets()); + secretsFileContent.put("secrets", secrets); secretsFileContent.put("metadata", Map.of("version", "1", "compatibility", spec.getVersion().toString())); Files.writeString(secretsFile, objectMapper.writeValueAsString(secretsFileContent)); } catch (IOException e) { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterSpecBuilder.java index 6cd7e60cf5d01..d06be2463cc5b 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterSpecBuilder.java @@ -161,8 +161,7 @@ private LocalNodeSpec build(LocalClusterSpec cluster) { getKeystorePassword(), getExtraConfigFiles(), getSystemProperties(), - getJvmArgs(), - getSecrets() + getJvmArgs() ); } } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalSpecBuilder.java index 76517b2fcd282..0cc9d4a360fb8 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalSpecBuilder.java +++ 
b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalSpecBuilder.java @@ -39,7 +39,6 @@ public abstract class AbstractLocalSpecBuilder> im private final Map extraConfigFiles = new HashMap<>(); private final Map systemProperties = new HashMap<>(); private final List jvmArgs = new ArrayList<>(); - private final Map secrets = new HashMap<>(); private DistributionType distributionType; private Version version; private String keystorePassword; @@ -189,16 +188,6 @@ public List getKeystoreProviders() { return inherit(() -> parent.getKeystoreProviders(), keystoreProviders); } - @Override - public T secret(String key, String value) { - this.secrets.put(key, value); - return cast(this); - } - - public Map getSecrets() { - return inherit(() -> parent.getSecrets(), secrets); - } - @Override public T configFile(String fileName, Resource configFile) { this.extraConfigFiles.put(fileName, configFile); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterFactory.java new file mode 100644 index 0000000000000..abb2947689636 --- /dev/null +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterFactory.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test.cluster.local; + +import org.elasticsearch.test.cluster.local.distribution.DistributionResolver; + +import java.nio.file.Path; + +public class DefaultLocalClusterFactory extends AbstractLocalClusterFactory { + private final DistributionResolver distributionResolver; + + public DefaultLocalClusterFactory(DistributionResolver distributionResolver) { + super(distributionResolver); + this.distributionResolver = distributionResolver; + } + + protected DefaultLocalClusterHandle createHandle(Path baseWorkingDir, LocalClusterSpec spec) { + return new DefaultLocalClusterHandle( + spec.getName(), + spec.getNodes().stream().map(s -> new Node(baseWorkingDir, distributionResolver, s)).toList() + ); + } +} diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java new file mode 100644 index 0000000000000..12483808ea7ce --- /dev/null +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java @@ -0,0 +1,265 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test.cluster.local; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.test.cluster.LogType; +import org.elasticsearch.test.cluster.local.AbstractLocalClusterFactory.Node; +import org.elasticsearch.test.cluster.local.model.User; +import org.elasticsearch.test.cluster.util.ExceptionUtils; +import org.elasticsearch.test.cluster.util.Version; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UncheckedIOException; +import java.net.MalformedURLException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Duration; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.ForkJoinWorkerThread; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class DefaultLocalClusterHandle implements LocalClusterHandle { + private static final Logger LOGGER = LogManager.getLogger(DefaultLocalClusterHandle.class); + private static final Duration CLUSTER_UP_TIMEOUT = Duration.ofSeconds(30); + + public final ForkJoinPool executor = new ForkJoinPool( + Math.max(Runtime.getRuntime().availableProcessors(), 4), + new ForkJoinPool.ForkJoinWorkerThreadFactory() { + private final AtomicLong counter = new AtomicLong(0); + + @Override + public ForkJoinWorkerThread newThread(ForkJoinPool pool) { + ForkJoinWorkerThread thread = ForkJoinPool.defaultForkJoinWorkerThreadFactory.newThread(pool); + thread.setName(name + "-node-executor-" + counter.getAndIncrement()); + return thread; + } + }, + null, + false + ); + private final AtomicBoolean started = new AtomicBoolean(false); + private final String name; + private final List nodes; + + public 
DefaultLocalClusterHandle(String name, List nodes) { + this.name = name; + this.nodes = nodes; + } + + @Override + public void start() { + if (started.getAndSet(true) == false) { + LOGGER.info("Starting Elasticsearch test cluster '{}'", name); + execute(() -> nodes.parallelStream().forEach(n -> n.start(null))); + } + waitUntilReady(); + } + + @Override + public void stop(boolean forcibly) { + if (started.getAndSet(false)) { + LOGGER.info("Stopping Elasticsearch test cluster '{}', forcibly: {}", name, forcibly); + execute(() -> nodes.parallelStream().forEach(n -> stopNode(nodes.indexOf(n), forcibly))); + } else { + // Make sure the process is stopped, otherwise wait + execute(() -> nodes.parallelStream().forEach(Node::waitForExit)); + } + } + + @Override + public void restart(boolean forcibly) { + stop(forcibly); + start(); + } + + @Override + public boolean isStarted() { + return started.get(); + } + + @Override + public void close() { + stop(true); + + executor.shutdownNow(); + try { + executor.awaitTermination(5, TimeUnit.SECONDS); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + @Override + public String getHttpAddresses() { + start(); + return execute(() -> nodes.parallelStream().map(Node::getHttpAddress).collect(Collectors.joining(","))); + } + + @Override + public String getHttpAddress(int index) { + return getHttpAddresses().split(",")[index]; + } + + @Override + public String getTransportEndpoints() { + start(); + return execute(() -> nodes.parallelStream().map(Node::getTransportEndpoint).collect(Collectors.joining(","))); + } + + @Override + public String getTransportEndpoint(int index) { + return getTransportEndpoints().split(",")[index]; + } + + @Override + public String getRemoteClusterServerEndpoints() { + start(); + return execute(() -> nodes.parallelStream().map(Node::getRemoteClusterServerEndpoint).collect(Collectors.joining(","))); + } + + @Override + public String getRemoteClusterServerEndpoint(int index) { + 
return getRemoteClusterServerEndpoints().split(",")[index]; + } + + @Override + public void upgradeNodeToVersion(int index, Version version) { + Node node = nodes.get(index); + node.stop(false); + LOGGER.info("Upgrading node '{}' to version {}", node.getName(), version); + node.start(version); + waitUntilReady(); + } + + @Override + public void upgradeToVersion(Version version) { + stop(false); + if (started.getAndSet(true) == false) { + LOGGER.info("Upgrading Elasticsearch test cluster '{}' to version {}", name, version); + execute(() -> nodes.parallelStream().forEach(n -> n.start(version))); + } + waitUntilReady(); + } + + public String getName(int index) { + return nodes.get(index).getName(); + } + + @Override + public long getPid(int index) { + return nodes.get(index).getPid(); + } + + public void stopNode(int index, boolean forcibly) { + nodes.get(index).stop(false); + } + + @Override + public InputStream getNodeLog(int index, LogType logType) { + return nodes.get(index).getLog(logType); + } + + protected void waitUntilReady() { + writeUnicastHostsFile(); + try { + WaitForHttpResource wait = configureWaitForReady(); + wait.waitFor(CLUSTER_UP_TIMEOUT.toMillis()); + } catch (Exception e) { + throw new RuntimeException("An error occurred while checking cluster '" + name + "' status.", e); + } + } + + private WaitForHttpResource configureWaitForReady() throws MalformedURLException { + Node node = nodes.get(0); + boolean securityEnabled = Boolean.parseBoolean(node.getSpec().getSetting("xpack.security.enabled", "true")); + boolean sslEnabled = Boolean.parseBoolean(node.getSpec().getSetting("xpack.security.http.ssl.enabled", "false")); + boolean securityAutoConfigured = isSecurityAutoConfigured(node); + String scheme = securityEnabled && (sslEnabled || securityAutoConfigured) ? 
"https" : "http"; + WaitForHttpResource wait = new WaitForHttpResource(scheme, node.getHttpAddress(), nodes.size()); + User credentials = node.getSpec().getUsers().get(0); + wait.setUsername(credentials.getUsername()); + wait.setPassword(credentials.getPassword()); + if (sslEnabled) { + configureWaitSecurity(wait, node); + } else if (securityAutoConfigured) { + wait.setCertificateAuthorities(node.getWorkingDir().resolve("config/certs/http_ca.crt").toFile()); + } + + return wait; + } + + private void configureWaitSecurity(WaitForHttpResource wait, Node node) { + String caFile = node.getSpec().getSetting("xpack.security.http.ssl.certificate_authorities", null); + if (caFile != null) { + wait.setCertificateAuthorities(node.getWorkingDir().resolve("config").resolve(caFile).toFile()); + } + String sslCertFile = node.getSpec().getSetting("xpack.security.http.ssl.certificate", null); + if (sslCertFile != null) { + wait.setCertificateAuthorities(node.getWorkingDir().resolve("config").resolve(sslCertFile).toFile()); + } + String sslKeystoreFile = node.getSpec().getSetting("xpack.security.http.ssl.keystore.path", null); + if (sslKeystoreFile != null && caFile == null) { // Can not set both trust stores and CA + wait.setTrustStoreFile(node.getWorkingDir().resolve("config").resolve(sslKeystoreFile).toFile()); + } + String keystorePassword = node.getSpec().getSetting("xpack.security.http.ssl.keystore.secure_password", null); + if (keystorePassword != null) { + wait.setTrustStorePassword(keystorePassword); + } + } + + private boolean isSecurityAutoConfigured(Node node) { + Path configFile = node.getWorkingDir().resolve("config").resolve("elasticsearch.yml"); + try (Stream lines = Files.lines(configFile)) { + return lines.anyMatch(l -> l.contains("BEGIN SECURITY AUTO CONFIGURATION")); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private void writeUnicastHostsFile() { + String transportUris = execute(() -> 
nodes.parallelStream().map(Node::getTransportEndpoint).collect(Collectors.joining("\n"))); + execute(() -> nodes.parallelStream().forEach(node -> { + try { + Path hostsFile = node.getWorkingDir().resolve("config").resolve("unicast_hosts.txt"); + if (Files.notExists(hostsFile)) { + Files.writeString(hostsFile, transportUris); + } + } catch (IOException e) { + throw new UncheckedIOException("Failed to write unicast_hosts for: " + node, e); + } + })); + } + + private T execute(Callable task) { + try { + return executor.submit(task).get(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } catch (ExecutionException e) { + throw new RuntimeException("An error occurred orchestrating test cluster.", ExceptionUtils.findRootCause(e)); + } + } + + private void execute(Runnable task) { + execute(() -> { + task.run(); + return true; + }); + } +} diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java index e8c5d7aac6e88..dd47fd057ac1c 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.test.cluster.local; -import org.elasticsearch.test.cluster.DefaultElasticsearchCluster; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.LocalDistributionResolver; import org.elasticsearch.test.cluster.local.distribution.ReleasedDistributionResolver; @@ -27,9 +26,11 @@ public DefaultLocalClusterSpecBuilder() { @Override public ElasticsearchCluster build() { - return new DefaultElasticsearchCluster<>( + return new DefaultLocalElasticsearchCluster<>( this::buildClusterSpec, - new LocalClusterFactory(new 
LocalDistributionResolver(new SnapshotDistributionResolver(new ReleasedDistributionResolver()))) + new DefaultLocalClusterFactory( + new LocalDistributionResolver(new SnapshotDistributionResolver(new ReleasedDistributionResolver())) + ) ); } } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/DefaultElasticsearchCluster.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java similarity index 88% rename from test/test-clusters/src/main/java/org/elasticsearch/test/cluster/DefaultElasticsearchCluster.java rename to test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java index 247356dbc7a87..ea15f770b4e6c 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/DefaultElasticsearchCluster.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java @@ -6,8 +6,10 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.test.cluster; +package org.elasticsearch.test.cluster.local; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.LogType; import org.elasticsearch.test.cluster.util.Version; import org.junit.runner.Description; import org.junit.runners.model.Statement; @@ -15,12 +17,12 @@ import java.io.InputStream; import java.util.function.Supplier; -public class DefaultElasticsearchCluster implements ElasticsearchCluster { +public class DefaultLocalElasticsearchCluster implements ElasticsearchCluster { private final Supplier specProvider; - private final ClusterFactory clusterFactory; + private final LocalClusterFactory clusterFactory; private H handle; - public DefaultElasticsearchCluster(Supplier specProvider, ClusterFactory clusterFactory) { + public DefaultLocalElasticsearchCluster(Supplier specProvider, LocalClusterFactory clusterFactory) { this.specProvider = specProvider; this.clusterFactory = clusterFactory; } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterFactory.java index 683befec249b9..edfd6f6af41f7 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterFactory.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterFactory.java @@ -8,22 +8,6 @@ package org.elasticsearch.test.cluster.local; -import org.elasticsearch.test.cluster.local.distribution.DistributionResolver; +import org.elasticsearch.test.cluster.ClusterFactory; -import java.nio.file.Path; - -public class LocalClusterFactory extends AbstractLocalClusterFactory { - private final DistributionResolver distributionResolver; - - public LocalClusterFactory(DistributionResolver distributionResolver) { - super(distributionResolver); - this.distributionResolver = distributionResolver; - } - - protected LocalClusterHandle 
createHandle(Path baseWorkingDir, LocalClusterSpec spec) { - return new LocalClusterHandle( - spec.getName(), - spec.getNodes().stream().map(s -> new Node(baseWorkingDir, distributionResolver, s)).toList() - ); - } -} +public interface LocalClusterFactory extends ClusterFactory {} diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java index d096222362d83..a3bba54bb4bf8 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java @@ -8,259 +8,89 @@ package org.elasticsearch.test.cluster.local; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.test.cluster.ClusterHandle; import org.elasticsearch.test.cluster.LogType; -import org.elasticsearch.test.cluster.local.AbstractLocalClusterFactory.Node; -import org.elasticsearch.test.cluster.local.model.User; -import org.elasticsearch.test.cluster.util.ExceptionUtils; import org.elasticsearch.test.cluster.util.Version; -import java.io.IOException; import java.io.InputStream; -import java.io.UncheckedIOException; -import java.net.MalformedURLException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ForkJoinPool; -import java.util.concurrent.ForkJoinWorkerThread; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; -import java.util.stream.Stream; -public class LocalClusterHandle implements ClusterHandle { - private static final Logger LOGGER = 
LogManager.getLogger(LocalClusterHandle.class); - private static final Duration CLUSTER_UP_TIMEOUT = Duration.ofSeconds(30); - - public final ForkJoinPool executor = new ForkJoinPool( - Math.max(Runtime.getRuntime().availableProcessors(), 4), - new ForkJoinPool.ForkJoinWorkerThreadFactory() { - private final AtomicLong counter = new AtomicLong(0); - - @Override - public ForkJoinWorkerThread newThread(ForkJoinPool pool) { - ForkJoinWorkerThread thread = ForkJoinPool.defaultForkJoinWorkerThreadFactory.newThread(pool); - thread.setName(name + "-node-executor-" + counter.getAndIncrement()); - return thread; - } - }, - null, - false - ); - private final AtomicBoolean started = new AtomicBoolean(false); - private final String name; - private final List nodes; - - public LocalClusterHandle(String name, List nodes) { - this.name = name; - this.nodes = nodes; - } - - @Override - public void start() { - if (started.getAndSet(true) == false) { - LOGGER.info("Starting Elasticsearch test cluster '{}'", name); - execute(() -> nodes.parallelStream().forEach(n -> n.start(null))); - } - waitUntilReady(); - } - - @Override - public void stop(boolean forcibly) { - if (started.getAndSet(false)) { - LOGGER.info("Stopping Elasticsearch test cluster '{}', forcibly: {}", name, forcibly); - execute(() -> nodes.parallelStream().forEach(n -> stopNode(nodes.indexOf(n), forcibly))); - } else { - // Make sure the process is stopped, otherwise wait - execute(() -> nodes.parallelStream().forEach(Node::waitForExit)); - } - } - - @Override - public void restart(boolean forcibly) { - stop(forcibly); - start(); - } - - @Override - public boolean isStarted() { - return started.get(); - } - - @Override - public void close() { - stop(true); - - executor.shutdownNow(); - try { - executor.awaitTermination(5, TimeUnit.SECONDS); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - - @Override - public String getHttpAddresses() { - start(); - return execute(() -> 
nodes.parallelStream().map(Node::getHttpAddress).collect(Collectors.joining(","))); - } - - @Override - public String getHttpAddress(int index) { - return getHttpAddresses().split(",")[index]; - } - - @Override - public String getTransportEndpoints() { - start(); - return execute(() -> nodes.parallelStream().map(Node::getTransportEndpoint).collect(Collectors.joining(","))); - } - - @Override - public String getTransportEndpoint(int index) { - return getTransportEndpoints().split(",")[index]; - } - - @Override - public String getRemoteClusterServerEndpoints() { - start(); - return execute(() -> nodes.parallelStream().map(Node::getRemoteClusterServerEndpoint).collect(Collectors.joining(","))); - } - - @Override - public String getRemoteClusterServerEndpoint(int index) { - return getRemoteClusterServerEndpoints().split(",")[index]; - } - - @Override - public void upgradeNodeToVersion(int index, Version version) { - Node node = nodes.get(index); - node.stop(false); - LOGGER.info("Upgrading node '{}' to version {}", node.getName(), version); - node.start(version); - waitUntilReady(); - } - - @Override - public void upgradeToVersion(Version version) { - stop(false); - if (started.getAndSet(true) == false) { - LOGGER.info("Upgrading Elasticsearch test cluster '{}' to version {}", name, version); - execute(() -> nodes.parallelStream().forEach(n -> n.start(version))); - } - waitUntilReady(); - } - - public String getName(int index) { - return nodes.get(index).getName(); - } - - @Override - public long getPid(int index) { - return nodes.get(index).getPid(); - } - - public void stopNode(int index, boolean forcibly) { - nodes.get(index).stop(false); - } - - @Override - public InputStream getNodeLog(int index, LogType logType) { - return nodes.get(index).getLog(logType); - } - - protected void waitUntilReady() { - writeUnicastHostsFile(); - try { - WaitForHttpResource wait = configureWaitForReady(); - wait.waitFor(CLUSTER_UP_TIMEOUT.toMillis()); - } catch (Exception e) { - 
throw new RuntimeException("An error occurred while checking cluster '" + name + "' status.", e); - } - } - - private WaitForHttpResource configureWaitForReady() throws MalformedURLException { - Node node = nodes.get(0); - boolean securityEnabled = Boolean.parseBoolean(node.getSpec().getSetting("xpack.security.enabled", "true")); - boolean sslEnabled = Boolean.parseBoolean(node.getSpec().getSetting("xpack.security.http.ssl.enabled", "false")); - boolean securityAutoConfigured = isSecurityAutoConfigured(node); - String scheme = securityEnabled && (sslEnabled || securityAutoConfigured) ? "https" : "http"; - WaitForHttpResource wait = new WaitForHttpResource(scheme, node.getHttpAddress(), nodes.size()); - User credentials = node.getSpec().getUsers().get(0); - wait.setUsername(credentials.getUsername()); - wait.setPassword(credentials.getPassword()); - if (sslEnabled) { - configureWaitSecurity(wait, node); - } else if (securityAutoConfigured) { - wait.setCertificateAuthorities(node.getWorkingDir().resolve("config/certs/http_ca.crt").toFile()); - } - - return wait; - } - - private void configureWaitSecurity(WaitForHttpResource wait, Node node) { - String caFile = node.getSpec().getSetting("xpack.security.http.ssl.certificate_authorities", null); - if (caFile != null) { - wait.setCertificateAuthorities(node.getWorkingDir().resolve("config").resolve(caFile).toFile()); - } - String sslCertFile = node.getSpec().getSetting("xpack.security.http.ssl.certificate", null); - if (sslCertFile != null) { - wait.setCertificateAuthorities(node.getWorkingDir().resolve("config").resolve(sslCertFile).toFile()); - } - String sslKeystoreFile = node.getSpec().getSetting("xpack.security.http.ssl.keystore.path", null); - if (sslKeystoreFile != null && caFile == null) { // Can not set both trust stores and CA - wait.setTrustStoreFile(node.getWorkingDir().resolve("config").resolve(sslKeystoreFile).toFile()); - } - String keystorePassword = 
node.getSpec().getSetting("xpack.security.http.ssl.keystore.secure_password", null); - if (keystorePassword != null) { - wait.setTrustStorePassword(keystorePassword); - } - } - - private boolean isSecurityAutoConfigured(Node node) { - Path configFile = node.getWorkingDir().resolve("config").resolve("elasticsearch.yml"); - try (Stream lines = Files.lines(configFile)) { - return lines.anyMatch(l -> l.contains("BEGIN SECURITY AUTO CONFIGURATION")); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - private void writeUnicastHostsFile() { - String transportUris = execute(() -> nodes.parallelStream().map(Node::getTransportEndpoint).collect(Collectors.joining("\n"))); - execute(() -> nodes.parallelStream().forEach(node -> { - try { - Path hostsFile = node.getWorkingDir().resolve("config").resolve("unicast_hosts.txt"); - if (Files.notExists(hostsFile)) { - Files.writeString(hostsFile, transportUris); - } - } catch (IOException e) { - throw new UncheckedIOException("Failed to write unicast_hosts for: " + node, e); - } - })); - } - - private T execute(Callable task) { - try { - return executor.submit(task).get(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } catch (ExecutionException e) { - throw new RuntimeException("An error occurred orchestrating test cluster.", ExceptionUtils.findRootCause(e)); - } - } - - private void execute(Runnable task) { - execute(() -> { - task.run(); - return true; - }); - } +public interface LocalClusterHandle extends ClusterHandle { + /** + * Stops the node at a given index. + * @param index of the node to stop + */ + void stopNode(int index, boolean forcibly); + + /** + * Restarts the cluster. Effectively the same as calling {@link #stop(boolean)} followed by {@link #start()} + * + * @param forcibly whether to ficibly terminate the cluster + */ + void restart(boolean forcibly); + + /** + * Get the name of the node for the given index. 
+ */ + String getName(int index); + + /** + * Get the pid of the node for the given index. + */ + long getPid(int index); + + /** + * Returns a comma-separated list of TCP transport endpoints for cluster. If this method is called on an unstarted cluster, the cluster + * will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability.\ + * + * @return cluster node TCP transport endpoints + */ + String getTransportEndpoints(); + + /** + * Returns the TCP transport endpoint for the node at the given index. If this method is called on an unstarted cluster, the cluster + * will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability. + * + * @return cluster node TCP transport endpoints + */ + String getTransportEndpoint(int index); + + /** + * Returns a comma-separated list of remote cluster server endpoints for cluster. If this method is called on an unstarted cluster, + * the cluster will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability. + * Note individual node can enable or disable remote cluster server independently. When a node has remote cluster server disabled, + * an empty string is returned for that node. Hence, it is possible for this method to return something like "[::1]:63300,,". + * + * @return cluster node remote cluster server endpoints + */ + String getRemoteClusterServerEndpoints(); + + /** + * Returns the remote cluster server endpoint forx the node at the given index. If this method is called on an unstarted cluster, + * the cluster will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability. + * Note individual node can enable or disable remote cluster server independently. When a node has remote cluster server disabled, + * an empty string is returned. 
+ * + * @return cluster node remote cluster server endpoints + */ + String getRemoteClusterServerEndpoint(int index); + + /** + * Upgrades a single node to the given version. Method blocks until the node is back up and ready to respond to requests. + * + * @param index index of node ot upgrade + * @param version version to upgrade to + */ + void upgradeNodeToVersion(int index, Version version); + + /** + * Performs a "full cluster restart" upgrade to the given version. Method blocks until the cluster is restarted and available. + * + * @param version version to upgrade to + */ + void upgradeToVersion(Version version); + + /** + * Returns an {@link InputStream} for the given node log. + */ + InputStream getNodeLog(int index, LogType logType); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java index 2cca08c86b570..4c3608a181911 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java @@ -84,7 +84,6 @@ public static class LocalNodeSpec { private final Map extraConfigFiles; private final Map systemProperties; private final List jvmArgs; - private final Map secrets; private Version version; public LocalNodeSpec( @@ -105,8 +104,7 @@ public LocalNodeSpec( String keystorePassword, Map extraConfigFiles, Map systemProperties, - List jvmArgs, - Map secrets + List jvmArgs ) { this.cluster = cluster; this.name = name; @@ -126,7 +124,6 @@ public LocalNodeSpec( this.extraConfigFiles = extraConfigFiles; this.systemProperties = systemProperties; this.jvmArgs = jvmArgs; - this.secrets = secrets; } void setVersion(Version version) { @@ -189,10 +186,6 @@ public List getJvmArgs() { return jvmArgs; } - public Map getSecrets() { - return secrets; - } - public boolean isSecurityEnabled() { return 
Boolean.parseBoolean(getSetting("xpack.security.enabled", getVersion().onOrAfter("8.0.0") ? "true" : "false")); } @@ -310,8 +303,7 @@ private LocalNodeSpec getFilteredSpec(SettingsProvider filteredProvider, Setting n.keystorePassword, n.extraConfigFiles, n.systemProperties, - n.jvmArgs, - n.secrets + n.jvmArgs ) ) .toList(); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalSpecBuilder.java index b34c3e74838dc..e3b6b98d84755 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalSpecBuilder.java @@ -111,12 +111,6 @@ interface LocalSpecBuilder> { */ T configFile(String fileName, Resource configFile); - /** - * Adds a secret to the local secure settings file. This should be used instead of {@link #keystore(String, String)} when file-based - * secure settings are enabled. - */ - T secret(String key, String value); - /** * Sets the version of Elasticsearch. Defaults to {@link Version#CURRENT}. 
*/ diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle index c6a634ebfd549..cdc3279a8a696 100644 --- a/x-pack/plugin/eql/qa/rest/build.gradle +++ b/x-pack/plugin/eql/qa/rest/build.gradle @@ -1,6 +1,7 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-test-artifact' import org.elasticsearch.gradle.internal.info.BuildParams diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java index ed019f5cb1317..44312a8cbd25e 100644 --- a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java @@ -7,9 +7,6 @@ package org.elasticsearch.xpack.eql; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.eql.EqlRestTestCase; import org.junit.ClassRule; @@ -23,10 +20,4 @@ public class EqlRestIT extends EqlRestTestCase { protected String getTestRestCluster() { return cluster.getHttpAddresses(); } - - @Override - protected Settings restClientSettings() { - String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } } diff --git a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlTestCluster.java b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlTestCluster.java index 1aa72288a2e8a..66ea80dd1c65f 100644 --- 
a/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlTestCluster.java +++ b/x-pack/plugin/eql/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlTestCluster.java @@ -14,7 +14,6 @@ public class EqlTestCluster { public static ElasticsearchCluster getCluster() { return ElasticsearchCluster.local() - .nodes(1) .distribution(DistributionType.DEFAULT) .setting("xpack.license.self_generated.type", "basic") .setting("xpack.monitoring.collection.enabled", "true") diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlClientYamlIT.java b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlClientYamlIT.java index 0e5d862d4c860..e5c85eeffd998 100644 --- a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlClientYamlIT.java +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlClientYamlIT.java @@ -19,7 +19,6 @@ public class EqlClientYamlIT extends ESClientYamlSuiteTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .nodes(1) .distribution(DistributionType.DEFAULT) .setting("xpack.license.self_generated.type", "basic") .setting("xpack.security.enabled", "false") diff --git a/x-pack/plugin/security/qa/service-account/build.gradle b/x-pack/plugin/security/qa/service-account/build.gradle index b022f93eca8fe..67490a72c841d 100644 --- a/x-pack/plugin/security/qa/service-account/build.gradle +++ b/x-pack/plugin/security/qa/service-account/build.gradle @@ -8,7 +8,3 @@ dependencies { clusterModules(project(":modules:analysis-common")) clusterModules(project(":modules:rest-root")) } - -testArtifacts { - registerTestArtifactFromSourceSet(sourceSets.javaRestTest) -} From 886e35fa7650e8238ad182f59e67d388d5c29283 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 13 Sep 2023 14:43:06 -0500 Subject: [PATCH 050/114] Tracer requires io.opentelemetry.api (#99550) --- 
modules/apm/src/main/java/module-info.java | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/apm/src/main/java/module-info.java b/modules/apm/src/main/java/module-info.java index d99245304edf1..f0a89bb58afe1 100644 --- a/modules/apm/src/main/java/module-info.java +++ b/modules/apm/src/main/java/module-info.java @@ -13,6 +13,7 @@ requires org.apache.logging.log4j; requires org.apache.lucene.core; requires io.opentelemetry.context; + requires io.opentelemetry.api; exports org.elasticsearch.tracing.apm; } From 81c90ff1ed97607984a7bef5a8bfad91a20c1139 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 13 Sep 2023 16:00:45 -0400 Subject: [PATCH 051/114] ESQL: Mark counter fields as unsupported (#99054) This marks TSDB's `counter` fields as unsupported by ESQL. We'll support them eventually, but in the short term we were just treating them like their underlying numeric type and that's not great. They have a conceptually different meaning that we'd like to respect one day. So, for now, we'll mark them unsupported. 
--- docs/changelog/99054.yaml | 5 + .../resources/rest-api-spec/test/40_tsdb.yml | 20 +++- .../test/45_non_tsdb_counter.yml | 112 ++++++++++++++++++ .../xpack/esql/action/EsqlQueryResponse.java | 2 +- .../xpack/esql/type/EsqlDataTypeRegistry.java | 9 +- .../xpack/esql/type/EsqlDataTypes.java | 2 +- .../esql/action/EsqlQueryResponseTests.java | 2 +- .../esql/type/EsqlDataTypeRegistryTests.java | 59 +++++++++ .../xpack/ql/index/IndexResolver.java | 29 ++++- .../xpack/ql/type/DataTypeRegistry.java | 4 +- .../ql/type/DefaultDataTypeRegistry.java | 4 +- .../elasticsearch/xpack/ql/type/Types.java | 4 +- .../xpack/sql/type/SqlDataTypeRegistry.java | 3 +- 13 files changed, 236 insertions(+), 19 deletions(-) create mode 100644 docs/changelog/99054.yaml create mode 100644 x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/45_non_tsdb_counter.yml create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java diff --git a/docs/changelog/99054.yaml b/docs/changelog/99054.yaml new file mode 100644 index 0000000000000..a9e4128e7ae97 --- /dev/null +++ b/docs/changelog/99054.yaml @@ -0,0 +1,5 @@ +pr: 99054 +summary: "ESQL: Mark counter fields as unsupported" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml index f37c897d77b4b..a72205b3af064 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml @@ -32,8 +32,10 @@ setup: properties: tx: type: long + time_series_metric: counter rx: type: long + time_series_metric: counter - do: bulk: refresh: true @@ -70,9 +72,9 @@ load everything: - match: {columns.2.name: "k8s.pod.name"} - match: 
{columns.2.type: "keyword"} - match: {columns.3.name: "k8s.pod.network.rx"} - - match: {columns.3.type: "long"} + - match: {columns.3.type: "unsupported"} - match: {columns.4.name: "k8s.pod.network.tx"} - - match: {columns.4.type: "long"} + - match: {columns.4.type: "unsupported"} - match: {columns.5.name: "k8s.pod.uid"} - match: {columns.5.type: "keyword"} - match: {columns.6.name: "metricset"} @@ -84,14 +86,22 @@ load a document: - do: esql.query: body: - query: 'from test | where k8s.pod.network.tx == 1434577921' + query: 'from test | where @timestamp == "2021-04-28T18:50:23.142Z"' - length: {values: 1} - length: {values.0: 7} - match: {values.0.0: "2021-04-28T18:50:23.142Z"} - match: {values.0.1: "10.10.55.3"} - match: {values.0.2: "dog"} - - match: {values.0.3: 530600088} - - match: {values.0.4: 1434577921} + - match: {values.0.3: ""} + - match: {values.0.4: ""} - match: {values.0.5: "df3145b3-0563-4d3b-a0f7-897eb2876ea9"} - match: {values.0.6: "pod"} + +--- +filter on counter: + - do: + catch: /Cannot use field \[k8s.pod.network.tx\] with unsupported type \[counter\]/ + esql.query: + body: + query: 'from test | where k8s.pod.network.tx == 1434577921' diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/45_non_tsdb_counter.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/45_non_tsdb_counter.yml new file mode 100644 index 0000000000000..a4344946aea0d --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/45_non_tsdb_counter.yml @@ -0,0 +1,112 @@ +setup: + - do: + indices.create: + index: test + body: + settings: + index: + mode: standard + mappings: + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + ip: + type: ip + network: + properties: + tx: + type: 
long + time_series_metric: counter + rx: + type: long + time_series_metric: counter + - do: + bulk: + refresh: true + index: test + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2005177954, "rx": 801479970}}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2006223737, "rx": 802337279}}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.2", "network": {"tx": 2012916202, "rx": 803685721}}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.3", "network": {"tx": 1434521831, "rx": 530575198}}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.3", "network": {"tx": 1434577921, "rx": 530600088}}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.3", "network": {"tx": 1434587694, "rx": 530604797}}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.3", "network": {"tx": 
1434595272, "rx": 530605511}}}}' + +--- +load everything: + - do: + esql.query: + body: + query: 'from test' + + - match: {columns.0.name: "@timestamp"} + - match: {columns.0.type: "date"} + - match: {columns.1.name: "k8s.pod.ip"} + - match: {columns.1.type: "ip"} + - match: {columns.2.name: "k8s.pod.name"} + - match: {columns.2.type: "keyword"} + - match: {columns.3.name: "k8s.pod.network.rx"} + - match: {columns.3.type: "long"} + - match: {columns.4.name: "k8s.pod.network.tx"} + - match: {columns.4.type: "long"} + - match: {columns.5.name: "k8s.pod.uid"} + - match: {columns.5.type: "keyword"} + - match: {columns.6.name: "metricset"} + - match: {columns.6.type: "keyword"} + - length: {values: 8} + +--- +load a document: + - do: + esql.query: + body: + query: 'from test | where @timestamp == "2021-04-28T18:50:23.142Z"' + + - length: {values: 1} + - length: {values.0: 7} + - match: {values.0.0: "2021-04-28T18:50:23.142Z"} + - match: {values.0.1: "10.10.55.3"} + - match: {values.0.2: "dog"} + - match: {values.0.3: 530600088} + - match: {values.0.4: 1434577921} + - match: {values.0.5: "df3145b3-0563-4d3b-a0f7-897eb2876ea9"} + - match: {values.0.6: "pod"} + +--- +filter on counter: + - do: + esql.query: + body: + query: 'from test | where k8s.pod.network.tx == 1434577921' + + - length: {values: 1} + - length: {values.0: 7} + - match: {values.0.0: "2021-04-28T18:50:23.142Z"} + - match: {values.0.1: "10.10.55.3"} + - match: {values.0.2: "dog"} + - match: {values.0.3: 530600088} + - match: {values.0.4: 1434577921} + - match: {values.0.5: "df3145b3-0563-4d3b-a0f7-897eb2876ea9"} + - match: {values.0.6: "pod"} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 774208480c6ff..2b029a03fa9f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -247,7 +247,7 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef */ private static Page valuesToPage(List dataTypes, List> values) { List results = dataTypes.stream() - .map(c -> LocalExecutionPlanner.toElementType(EsqlDataTypes.fromEs(c)).newBlockBuilder(values.size())) + .map(c -> LocalExecutionPlanner.toElementType(EsqlDataTypes.fromName(c)).newBlockBuilder(values.size())) .toList(); for (List row : values) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistry.java index a0883fce4b79f..ff6e7f4aa2736 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistry.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.type; +import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypeConverter; import org.elasticsearch.xpack.ql.type.DataTypeRegistry; @@ -29,8 +30,12 @@ public Collection dataTypes() { } @Override - public DataType fromEs(String typeName) { - return EsqlDataTypes.fromEs(typeName); + public DataType fromEs(String typeName, TimeSeriesParams.MetricType metricType) { + if (metricType == TimeSeriesParams.MetricType.COUNTER) { + // Counter fields will be a counter type, for now they are unsupported + return DataTypes.UNSUPPORTED; + } + return EsqlDataTypes.fromName(typeName); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index 64358a3435e1f..b8ba722b989ad 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -87,7 +87,7 @@ public static DataType fromTypeName(String name) { return NAME_TO_TYPE.get(name.toLowerCase(Locale.ROOT)); } - public static DataType fromEs(String name) { + public static DataType fromName(String name) { DataType type = ES_TO_TYPE.get(name); return type != null ? type : UNSUPPORTED; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 07899000cba4a..5bf8df1c3fd0b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -72,7 +72,7 @@ private ColumnInfo randomColumnInfo() { private Page randomPage(List columns) { return new Page(columns.stream().map(c -> { - Block.Builder builder = LocalExecutionPlanner.toElementType(EsqlDataTypes.fromEs(c.type())).newBlockBuilder(1); + Block.Builder builder = LocalExecutionPlanner.toElementType(EsqlDataTypes.fromName(c.type())).newBlockBuilder(1); switch (c.type()) { case "unsigned_long", "long" -> ((LongBlock.Builder) builder).appendLong(randomLong()); case "integer" -> ((IntBlock.Builder) builder).appendInt(randomInt()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java new file mode 100644 index 0000000000000..3620fcc8c5926 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.type; + +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.index.mapper.TimeSeriesParams; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.index.IndexResolution; +import org.elasticsearch.xpack.ql.index.IndexResolver; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.type.EsField; + +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class EsqlDataTypeRegistryTests extends ESTestCase { + public void testCounter() { + resolve("long", TimeSeriesParams.MetricType.COUNTER, DataTypes.UNSUPPORTED); + } + + public void testGauge() { + resolve("long", TimeSeriesParams.MetricType.GAUGE, DataTypes.LONG); + } + + public void testLong() { + resolve("long", null, DataTypes.LONG); + } + + private void resolve(String esTypeName, TimeSeriesParams.MetricType metricType, DataType expected) { + String[] indices = new String[] { "idx-" + randomAlphaOfLength(5) }; + FieldCapabilities fieldCap = new FieldCapabilities( + randomAlphaOfLength(3), + esTypeName, + false, + true, + true, + false, + metricType, + indices, + null, + null, + null, + null, + Map.of() + ); + FieldCapabilitiesResponse caps = new FieldCapabilitiesResponse(indices, Map.of(fieldCap.getName(), Map.of(esTypeName, fieldCap))); + IndexResolution resolution = IndexResolver.mergedMappings(EsqlDataTypeRegistry.INSTANCE, "idx-*", caps); + + EsField f = resolution.get().mapping().get(fieldCap.getName()); + assertThat(f.getDataType(), equalTo(expected)); + } +} diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java 
b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java index 16d245bb93de5..c0c3068b4d98b 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolver.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.transport.NoSuchRemoteClusterException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.type.DataType; @@ -453,7 +454,7 @@ private static EsField createField( // lack of parent implies the field is an alias if (map == null) { // as such, create the field manually, marking the field to also be an alias - fieldFunction = s -> createField(typeRegistry, s, OBJECT.esType(), new TreeMap<>(), false, true); + fieldFunction = s -> createField(typeRegistry, s, OBJECT.esType(), null, new TreeMap<>(), false, true); } else { Iterator iterator = map.values().iterator(); FieldCapabilities parentCap = iterator.next(); @@ -461,7 +462,15 @@ private static EsField createField( parentCap = iterator.next(); } final FieldCapabilities parentC = parentCap; - fieldFunction = s -> createField(typeRegistry, s, parentC.getType(), new TreeMap<>(), parentC.isAggregatable(), false); + fieldFunction = s -> createField( + typeRegistry, + s, + parentC.getType(), + parentC.getMetricType(), + new TreeMap<>(), + parentC.isAggregatable(), + false + ); } parent = createField(typeRegistry, parentName, globalCaps, hierarchicalMapping, flattedMapping, fieldFunction); @@ -495,11 +504,12 @@ private static EsField createField( DataTypeRegistry typeRegistry, String fieldName, String typeName, + TimeSeriesParams.MetricType metricType, Map props, boolean isAggregateable, boolean isAlias ) { - DataType esType = 
typeRegistry.fromEs(typeName); + DataType esType = typeRegistry.fromEs(typeName, metricType); if (esType == TEXT) { return new TextEsField(fieldName, props, false, isAlias); @@ -514,7 +524,8 @@ private static EsField createField( return DateEsField.dateEsField(fieldName, props, isAggregateable); } if (esType == UNSUPPORTED) { - return new UnsupportedEsField(fieldName, typeName, null, props); + String originalType = metricType == TimeSeriesParams.MetricType.COUNTER ? "counter" : typeName; + return new UnsupportedEsField(fieldName, originalType, null, props); } return new EsField(fieldName, esType, props, isAggregateable, isAlias); @@ -727,7 +738,15 @@ private static void createField( indexFields.flattedMapping, s -> invalidField != null ? invalidField - : createField(typeRegistry, s, typeCap.getType(), emptyMap(), typeCap.isAggregatable(), isAliasFieldType.get()) + : createField( + typeRegistry, + s, + typeCap.getType(), + typeCap.getMetricType(), + emptyMap(), + typeCap.isAggregatable(), + isAliasFieldType.get() + ) ); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeRegistry.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeRegistry.java index 6989d4572a974..712b7df103947 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeRegistry.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeRegistry.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.ql.type; +import org.elasticsearch.index.mapper.TimeSeriesParams; + import java.util.Collection; /** @@ -19,7 +21,7 @@ public interface DataTypeRegistry { // Collection dataTypes(); - DataType fromEs(String typeName); + DataType fromEs(String typeName, TimeSeriesParams.MetricType metricType); DataType fromJava(Object value); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DefaultDataTypeRegistry.java 
b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DefaultDataTypeRegistry.java index 3c37998191eaa..472f51870d01c 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DefaultDataTypeRegistry.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DefaultDataTypeRegistry.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.ql.type; +import org.elasticsearch.index.mapper.TimeSeriesParams; + import java.util.Collection; public class DefaultDataTypeRegistry implements DataTypeRegistry { @@ -21,7 +23,7 @@ public Collection dataTypes() { } @Override - public DataType fromEs(String typeName) { + public DataType fromEs(String typeName, TimeSeriesParams.MetricType metricType) { return DataTypes.fromEs(typeName); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/Types.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/Types.java index c202e797b7566..a19f4c634f77c 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/Types.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/Types.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.core.Booleans; +import org.elasticsearch.index.mapper.TimeSeriesParams; import java.util.Collections; import java.util.LinkedHashMap; @@ -52,8 +53,9 @@ private static DataType getType(DataTypeRegistry typeRegistry, Map dataTypes() { } @Override - public DataType fromEs(String typeName) { + public DataType fromEs(String typeName, TimeSeriesParams.MetricType metricType) { return SqlDataTypes.fromEs(typeName); } From 816583049dd453d4bc97229f8b4f274b11782db2 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Wed, 13 Sep 2023 16:21:28 -0400 Subject: [PATCH 052/114] [buildkite] Re-enable platform-support periodic pipelines (#99552) --- .buildkite/scripts/periodic.trigger.sh | 36 +++++++++++++++--------- catalog-info.yaml | 39 -------------------------- 2 files changed, 22 
insertions(+), 53 deletions(-) mode change 100644 => 100755 .buildkite/scripts/periodic.trigger.sh diff --git a/.buildkite/scripts/periodic.trigger.sh b/.buildkite/scripts/periodic.trigger.sh old mode 100644 new mode 100755 index 36d106e87ee9c..754c701927185 --- a/.buildkite/scripts/periodic.trigger.sh +++ b/.buildkite/scripts/periodic.trigger.sh @@ -2,8 +2,6 @@ set -euo pipefail -exit 0 - echo "steps:" source .buildkite/scripts/branches.sh @@ -14,18 +12,6 @@ for BRANCH in "${BRANCHES[@]}"; do LAST_GOOD_COMMIT=$(echo "${BUILD_JSON}" | jq -r '.commit') cat < Date: Wed, 13 Sep 2023 16:27:50 -0400 Subject: [PATCH 053/114] ESQL: Compact topn (#99316) This lowers topn's memory usage somewhat and makes it easier to track the memory usage. That looks like: ``` "status" : { "occupied_rows" : 10000, "ram_bytes_used" : 255392224, "ram_used" : "243.5mb" } ``` In some cases the memory usage savings is significant. In an example with many, many keys the memory usage of each row drops from `58kb` to `25kb`. This is a little degenerate though and I expect the savings to normally be on the order of 10%. The real advantage is memory tracking. It's *easy* to track used memory. And, in a followup, it should be fairly easy to track circuit break the used memory. Mostly this is done by adding new abstractions and moving existing abstractions to top level classes with tests and stuff. * `TopNEncoder` is now a top level class. It has grown the ability to *decode* values as well as encode them. And it has grown "unsortable" versions which don't write their values such that sorting the bytes sorts the values. We use the "unsortable" versions when writing values. * `KeyExtractor` extracts keys from the blocks and writes them to the row's `BytesRefBuilder`. This is basically objects replacing one of switch statements in `RowFactory`. They are more scattered but easier to test, and hopefully `TopNOperator` is more readable with this behavior factored out. Also! 
Most implementations are automatically generated. * `ValueExtractor` extracts values from the blocks and writes them to the row's `BytesRefBuilder`. This replaces the other switch statement in `RowFactory` for the same reasons, except instead of writing to many arrays it writes to a `BytesRefBuilder` just like the key as compactly as it can manage. The memory savings comes from three changes: 1. Lower overhead for storing values by encoding them rather than using many primitive arrays. 2. Encode the value count as a vint rather than a whole int. Usually there are very few rows and vint encodes that quite nicely. 3. Don't write values that are in the key for single-valued fields. Instead we read them from the key. That's going to be very very common. This is unlikely to be faster than the old code. I haven't really tried for speed. Just memory usage and accountability. Once we get good accounting we can try and make this faster. I expect we'll have to figure out the megamorphic invocations I've added. But, for now, they help more than they hurt. 
--- .../compute/operator/TopNBenchmark.java | 23 +- .../operator/ValuesSourceReaderBenchmark.java | 2 +- docs/changelog/99316.yaml | 5 + x-pack/plugin/esql/compute/build.gradle | 78 ++ .../operator/topn/KeyExtractorForBoolean.java | 148 ++++ .../topn/KeyExtractorForBytesRef.java | 162 ++++ .../operator/topn/KeyExtractorForDouble.java | 146 ++++ .../operator/topn/KeyExtractorForInt.java | 146 ++++ .../operator/topn/KeyExtractorForLong.java | 146 ++++ .../topn/ResultBuilderForBoolean.java | 66 ++ .../topn/ResultBuilderForBytesRef.java | 70 ++ .../operator/topn/ResultBuilderForDouble.java | 66 ++ .../operator/topn/ResultBuilderForInt.java | 66 ++ .../operator/topn/ResultBuilderForLong.java | 66 ++ .../topn/ValueExtractorForBoolean.java | 80 ++ .../topn/ValueExtractorForBytesRef.java | 85 +++ .../topn/ValueExtractorForDouble.java | 80 ++ .../operator/topn/ValueExtractorForInt.java | 80 ++ .../operator/topn/ValueExtractorForLong.java | 80 ++ .../compute/src/main/java/module-info.java | 1 + .../elasticsearch/compute/data/DocBlock.java | 13 + .../elasticsearch/compute/data/DocVector.java | 16 + .../operator/FixedLengthTopNEncoder.java | 24 - .../compute/operator/TopNEncoder.java | 44 -- .../compute/operator/TopNOperator.java | 699 ------------------ .../compute/operator/UTF8TopNEncoder.java | 35 - .../topn/DefaultSortableTopNEncoder.java | 38 + .../topn/DefaultUnsortableTopNEncoder.java | 181 +++++ .../operator/topn/FixedLengthTopNEncoder.java | 56 ++ .../compute/operator/topn/KeyExtractor.java | 42 ++ .../operator/topn/KeyExtractorForNull.java | 29 + .../compute/operator/topn/ResultBuilder.java | 55 ++ .../operator/topn/ResultBuilderForDoc.java | 54 ++ .../operator/topn/ResultBuilderForNull.java | 39 + .../operator/topn/SortableTopNEncoder.java | 87 +++ .../compute/operator/topn/TopNEncoder.java | 75 ++ .../compute/operator/topn/TopNOperator.java | 440 +++++++++++ .../operator/topn/TopNOperatorStatus.java | 80 ++ .../operator/topn/UTF8TopNEncoder.java | 124 ++++ 
.../compute/operator/topn/ValueExtractor.java | 44 ++ .../operator/topn/ValueExtractorForDoc.java | 32 + .../operator/topn/ValueExtractorForNull.java | 26 + .../operator/topn/VersionTopNEncoder.java | 55 ++ .../operator/topn/X-KeyExtractor.java.st | 224 ++++++ .../operator/topn/X-ResultBuilder.java.st | 84 +++ .../operator/topn/X-ValueExtractor.java.st | 105 +++ .../DefaultUnsortableTopNEncoderTests.java | 49 ++ .../compute/operator/topn/ExtractorTests.java | 177 +++++ .../operator/topn/TopNEncoderTests.java | 131 ++++ .../topn/TopNOperatorStatusTests.java | 48 ++ .../{ => topn}/TopNOperatorTests.java | 657 ++++++++-------- .../compute/operator/topn/TopNRowTests.java | 37 + .../xpack/esql/planner/DefaultLayout.java | 28 +- .../xpack/esql/planner/ExchangeLayout.java | 8 +- .../xpack/esql/planner/Layout.java | 31 +- .../esql/planner/LocalExecutionPlanner.java | 55 +- .../TestPhysicalOperationProviders.java | 35 +- 57 files changed, 4339 insertions(+), 1214 deletions(-) create mode 100644 docs/changelog/99316.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java create mode 
100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FixedLengthTopNEncoder.java delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TopNEncoder.java delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TopNOperator.java delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/UTF8TopNEncoder.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/DefaultSortableTopNEncoder.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/DefaultUnsortableTopNEncoder.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/FixedLengthTopNEncoder.java create mode 100644 
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/KeyExtractor.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/KeyExtractorForNull.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/SortableTopNEncoder.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNEncoder.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatus.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/UTF8TopNEncoder.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractor.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForDoc.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForNull.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/VersionTopNEncoder.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st create mode 100644 
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ValueExtractor.java.st create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/DefaultUnsortableTopNEncoderTests.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNEncoderTests.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatusTests.java rename x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/{ => topn}/TopNOperatorTests.java (68%) create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java index c53d08b063ba9..f4b5397e55d39 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java @@ -13,11 +13,13 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.TopNOperator; +import org.elasticsearch.compute.operator.topn.TopNEncoder; +import org.elasticsearch.compute.operator.topn.TopNOperator; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import 
org.openjdk.jmh.annotations.Fork; @@ -77,8 +79,27 @@ private static Operator operator(String data, int topCount) { case TWO_LONGS, LONGS_AND_BYTES_REFS -> 2; default -> throw new IllegalArgumentException("unsupported data type [" + data + "]"); }; + List elementTypes = switch (data) { + case LONGS -> List.of(ElementType.LONG); + case INTS -> List.of(ElementType.INT); + case DOUBLES -> List.of(ElementType.DOUBLE); + case BOOLEANS -> List.of(ElementType.BOOLEAN); + case BYTES_REFS -> List.of(ElementType.BYTES_REF); + case TWO_LONGS -> List.of(ElementType.INT, ElementType.INT); + case LONGS_AND_BYTES_REFS -> List.of(ElementType.INT, ElementType.BYTES_REF); + default -> throw new IllegalArgumentException("unsupported data type [" + data + "]"); + }; + List encoders = switch (data) { + case LONGS, INTS, DOUBLES, BOOLEANS -> List.of(TopNEncoder.DEFAULT_SORTABLE); + case BYTES_REFS -> List.of(TopNEncoder.UTF8); + case TWO_LONGS -> List.of(TopNEncoder.DEFAULT_SORTABLE, TopNEncoder.DEFAULT_SORTABLE); + case LONGS_AND_BYTES_REFS -> List.of(TopNEncoder.DEFAULT_SORTABLE, TopNEncoder.UTF8); + default -> throw new IllegalArgumentException("unsupported data type [" + data + "]"); + }; return new TopNOperator( topCount, + elementTypes, + encoders, IntStream.range(0, count).mapToObj(c -> new TopNOperator.SortOrder(c, false, false)).toList(), 16 * 1024 ); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java index 0d9fcad984cbb..9c527923fae02 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java @@ -33,7 +33,7 @@ import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.ValueSourceInfo; import 
org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; -import org.elasticsearch.compute.operator.TopNOperator; +import org.elasticsearch.compute.operator.topn.TopNOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; diff --git a/docs/changelog/99316.yaml b/docs/changelog/99316.yaml new file mode 100644 index 0000000000000..78857b433b385 --- /dev/null +++ b/docs/changelog/99316.yaml @@ -0,0 +1,5 @@ +pr: 99316 +summary: "ESQL: Compact topn" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index d6a27b4122edb..6058770f25d1b 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -396,4 +396,82 @@ tasks.named('stringTemplates').configure { it.inputFile = multivalueDedupeInputFile it.outputFile = "org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java" } + File keyExtractorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st") + template { + it.properties = bytesRefProperties + it.inputFile = keyExtractorInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java" + } + template { + it.properties = booleanProperties + it.inputFile = keyExtractorInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java" + } + template { + it.properties = intProperties + it.inputFile = keyExtractorInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java" + } + template { + it.properties = longProperties + it.inputFile = keyExtractorInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java" + } + template { + it.properties = doubleProperties + it.inputFile = keyExtractorInputFile + it.outputFile = 
"org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java" + } + File valueExtractorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/operator/topn/X-ValueExtractor.java.st") + template { + it.properties = bytesRefProperties + it.inputFile = valueExtractorInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java" + } + template { + it.properties = booleanProperties + it.inputFile = valueExtractorInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java" + } + template { + it.properties = intProperties + it.inputFile = valueExtractorInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java" + } + template { + it.properties = longProperties + it.inputFile = valueExtractorInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java" + } + template { + it.properties = doubleProperties + it.inputFile = valueExtractorInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java" + } + File resultBuilderInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st") + template { + it.properties = bytesRefProperties + it.inputFile = resultBuilderInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java" + } + template { + it.properties = booleanProperties + it.inputFile = resultBuilderInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java" + } + template { + it.properties = intProperties + it.inputFile = resultBuilderInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java" + } + template { + it.properties = longProperties + it.inputFile = resultBuilderInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java" + } + template { + it.properties = 
doubleProperties + it.inputFile = resultBuilderInputFile + it.outputFile = "org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java" + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java new file mode 100644 index 0000000000000..0f7a4e109af75 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; + +abstract class KeyExtractorForBoolean implements KeyExtractor { + static KeyExtractorForBoolean extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, BooleanBlock block) { + BooleanVector v = block.asVector(); + if (v != null) { + return new KeyExtractorForBoolean.ForVector(encoder, nul, nonNul, v); + } + if (ascending) { + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorForBoolean.MinForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForBoolean.MinForUnordered(encoder, nul, nonNul, block); + } + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? 
new KeyExtractorForBoolean.MaxForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForBoolean.MaxForUnordered(encoder, nul, nonNul, block); + } + + private final byte nul; + private final byte nonNul; + + KeyExtractorForBoolean(TopNEncoder encoder, byte nul, byte nonNul) { + assert encoder == TopNEncoder.DEFAULT_SORTABLE; + this.nul = nul; + this.nonNul = nonNul; + } + + protected final int nonNul(BytesRefBuilder key, boolean value) { + key.append(nonNul); + TopNEncoder.DEFAULT_SORTABLE.encodeBoolean(value, key); + return Byte.BYTES + 1; + } + + protected final int nul(BytesRefBuilder key) { + key.append(nul); + return 1; + } + + static class ForVector extends KeyExtractorForBoolean { + private final BooleanVector vector; + + ForVector(TopNEncoder encoder, byte nul, byte nonNul, BooleanVector vector) { + super(encoder, nul, nonNul); + this.vector = vector; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + return nonNul(key, vector.getBoolean(position)); + } + } + + static class MinForAscending extends KeyExtractorForBoolean { + private final BooleanBlock block; + + MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getBoolean(block.getFirstValueIndex(position))); + } + } + + static class MaxForAscending extends KeyExtractorForBoolean { + private final BooleanBlock block; + + MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getBoolean(block.getFirstValueIndex(position) + block.getValueCount(position) - 1)); + } + } + + static class MinForUnordered 
extends KeyExtractorForBoolean { + private final BooleanBlock block; + + MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + for (int i = start; i < end; i++) { + if (block.getBoolean(i) == false) { + return nonNul(key, false); + } + } + return nonNul(key, true); + } + } + + static class MaxForUnordered extends KeyExtractorForBoolean { + private final BooleanBlock block; + + MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + for (int i = start; i < end; i++) { + if (block.getBoolean(i)) { + return nonNul(key, true); + } + } + return nonNul(key, false); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java new file mode 100644 index 0000000000000..d9d8d3878817e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; + +abstract class KeyExtractorForBytesRef implements KeyExtractor { + static KeyExtractorForBytesRef extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, BytesRefBlock block) { + BytesRefVector v = block.asVector(); + if (v != null) { + return new KeyExtractorForBytesRef.ForVector(encoder, nul, nonNul, v); + } + if (ascending) { + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorForBytesRef.MinForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForBytesRef.MinForUnordered(encoder, nul, nonNul, block); + } + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorForBytesRef.MaxForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForBytesRef.MaxForUnordered(encoder, nul, nonNul, block); + } + + private final TopNEncoder encoder; + protected final BytesRef scratch = new BytesRef(); + private final byte nul; + private final byte nonNul; + + KeyExtractorForBytesRef(TopNEncoder encoder, byte nul, byte nonNul) { + this.encoder = encoder; + this.nul = nul; + this.nonNul = nonNul; + } + + protected final int nonNul(BytesRefBuilder key, BytesRef value) { + key.append(nonNul); + return encoder.encodeBytesRef(value, key) + 1; + } + + protected final int nul(BytesRefBuilder key) { + key.append(nul); + return 1; + } + + static class ForVector extends KeyExtractorForBytesRef { + private final BytesRefVector vector; + + ForVector(TopNEncoder encoder, byte nul, byte nonNul, BytesRefVector vector) { + super(encoder, nul, nonNul); + this.vector = vector; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + return nonNul(key, vector.getBytesRef(position, scratch)); + } + } 
+ + static class MinForAscending extends KeyExtractorForBytesRef { + private final BytesRefBlock block; + + MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getBytesRef(block.getFirstValueIndex(position), scratch)); + } + } + + static class MaxForAscending extends KeyExtractorForBytesRef { + private final BytesRefBlock block; + + MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getBytesRef(block.getFirstValueIndex(position) + block.getValueCount(position) - 1, scratch)); + } + } + + static class MinForUnordered extends KeyExtractorForBytesRef { + private final BytesRefBlock block; + + private final BytesRef minScratch = new BytesRef(); + + MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + BytesRef min = block.getBytesRef(start, minScratch); + for (int i = start; i < end; i++) { + BytesRef v = block.getBytesRef(i, scratch); + if (v.compareTo(min) < 0) { + min.bytes = v.bytes; + min.offset = v.offset; + min.length = v.length; + } + } + return nonNul(key, min); + } + } + + static class MaxForUnordered extends KeyExtractorForBytesRef { + private final BytesRefBlock block; + + private final BytesRef maxScratch = new BytesRef(); + + MaxForUnordered(TopNEncoder 
encoder, byte nul, byte nonNul, BytesRefBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + BytesRef max = block.getBytesRef(start, maxScratch); + for (int i = start; i < end; i++) { + BytesRef v = block.getBytesRef(i, scratch); + if (v.compareTo(max) > 0) { + max.bytes = v.bytes; + max.offset = v.offset; + max.length = v.length; + } + } + return nonNul(key, max); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java new file mode 100644 index 0000000000000..8d8458d33ab47 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; + +abstract class KeyExtractorForDouble implements KeyExtractor { + static KeyExtractorForDouble extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, DoubleBlock block) { + DoubleVector v = block.asVector(); + if (v != null) { + return new KeyExtractorForDouble.ForVector(encoder, nul, nonNul, v); + } + if (ascending) { + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorForDouble.MinForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForDouble.MinForUnordered(encoder, nul, nonNul, block); + } + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorForDouble.MaxForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForDouble.MaxForUnordered(encoder, nul, nonNul, block); + } + + private final byte nul; + private final byte nonNul; + + KeyExtractorForDouble(TopNEncoder encoder, byte nul, byte nonNul) { + assert encoder == TopNEncoder.DEFAULT_SORTABLE; + this.nul = nul; + this.nonNul = nonNul; + } + + protected final int nonNul(BytesRefBuilder key, double value) { + key.append(nonNul); + TopNEncoder.DEFAULT_SORTABLE.encodeDouble(value, key); + return Double.BYTES + 1; + } + + protected final int nul(BytesRefBuilder key) { + key.append(nul); + return 1; + } + + static class ForVector extends KeyExtractorForDouble { + private final DoubleVector vector; + + ForVector(TopNEncoder encoder, byte nul, byte nonNul, DoubleVector vector) { + super(encoder, nul, nonNul); + this.vector = vector; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + return nonNul(key, vector.getDouble(position)); + } + } + + static class MinForAscending extends KeyExtractorForDouble { + private final DoubleBlock block; + + 
MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getDouble(block.getFirstValueIndex(position))); + } + } + + static class MaxForAscending extends KeyExtractorForDouble { + private final DoubleBlock block; + + MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getDouble(block.getFirstValueIndex(position) + block.getValueCount(position) - 1)); + } + } + + static class MinForUnordered extends KeyExtractorForDouble { + private final DoubleBlock block; + + MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + double min = block.getDouble(start); + for (int i = start + 1; i < end; i++) { + min = Math.min(min, block.getDouble(i)); + } + return nonNul(key, min); + } + } + + static class MaxForUnordered extends KeyExtractorForDouble { + private final DoubleBlock block; + + MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + double max = block.getDouble(start); + for 
(int i = start + 1; i < end; i++) { + max = Math.max(max, block.getDouble(i)); + } + return nonNul(key, max); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java new file mode 100644 index 0000000000000..9c20f53689b0a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; + +abstract class KeyExtractorForInt implements KeyExtractor { + static KeyExtractorForInt extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, IntBlock block) { + IntVector v = block.asVector(); + if (v != null) { + return new KeyExtractorForInt.ForVector(encoder, nul, nonNul, v); + } + if (ascending) { + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorForInt.MinForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForInt.MinForUnordered(encoder, nul, nonNul, block); + } + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? 
new KeyExtractorForInt.MaxForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForInt.MaxForUnordered(encoder, nul, nonNul, block); + } + + private final byte nul; + private final byte nonNul; + + KeyExtractorForInt(TopNEncoder encoder, byte nul, byte nonNul) { + assert encoder == TopNEncoder.DEFAULT_SORTABLE; + this.nul = nul; + this.nonNul = nonNul; + } + + protected final int nonNul(BytesRefBuilder key, int value) { + key.append(nonNul); + TopNEncoder.DEFAULT_SORTABLE.encodeInt(value, key); + return Integer.BYTES + 1; + } + + protected final int nul(BytesRefBuilder key) { + key.append(nul); + return 1; + } + + static class ForVector extends KeyExtractorForInt { + private final IntVector vector; + + ForVector(TopNEncoder encoder, byte nul, byte nonNul, IntVector vector) { + super(encoder, nul, nonNul); + this.vector = vector; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + return nonNul(key, vector.getInt(position)); + } + } + + static class MinForAscending extends KeyExtractorForInt { + private final IntBlock block; + + MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getInt(block.getFirstValueIndex(position))); + } + } + + static class MaxForAscending extends KeyExtractorForInt { + private final IntBlock block; + + MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getInt(block.getFirstValueIndex(position) + block.getValueCount(position) - 1)); + } + } + + static class MinForUnordered extends KeyExtractorForInt { + private final IntBlock block; + + 
MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + int min = block.getInt(start); + for (int i = start + 1; i < end; i++) { + min = Math.min(min, block.getInt(i)); + } + return nonNul(key, min); + } + } + + static class MaxForUnordered extends KeyExtractorForInt { + private final IntBlock block; + + MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + int max = block.getInt(start); + for (int i = start + 1; i < end; i++) { + max = Math.max(max, block.getInt(i)); + } + return nonNul(key, max); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java new file mode 100644 index 0000000000000..5ad6c8d9602a8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; + +abstract class KeyExtractorForLong implements KeyExtractor { + static KeyExtractorForLong extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, LongBlock block) { + LongVector v = block.asVector(); + if (v != null) { + return new KeyExtractorForLong.ForVector(encoder, nul, nonNul, v); + } + if (ascending) { + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorForLong.MinForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForLong.MinForUnordered(encoder, nul, nonNul, block); + } + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorForLong.MaxForAscending(encoder, nul, nonNul, block) + : new KeyExtractorForLong.MaxForUnordered(encoder, nul, nonNul, block); + } + + private final byte nul; + private final byte nonNul; + + KeyExtractorForLong(TopNEncoder encoder, byte nul, byte nonNul) { + assert encoder == TopNEncoder.DEFAULT_SORTABLE; + this.nul = nul; + this.nonNul = nonNul; + } + + protected final int nonNul(BytesRefBuilder key, long value) { + key.append(nonNul); + TopNEncoder.DEFAULT_SORTABLE.encodeLong(value, key); + return Long.BYTES + 1; + } + + protected final int nul(BytesRefBuilder key) { + key.append(nul); + return 1; + } + + static class ForVector extends KeyExtractorForLong { + private final LongVector vector; + + ForVector(TopNEncoder encoder, byte nul, byte nonNul, LongVector vector) { + super(encoder, nul, nonNul); + this.vector = vector; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + return nonNul(key, vector.getLong(position)); + } + } + + static class MinForAscending extends KeyExtractorForLong { + private final LongBlock block; + + MinForAscending(TopNEncoder encoder, byte nul, byte 
nonNul, LongBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getLong(block.getFirstValueIndex(position))); + } + } + + static class MaxForAscending extends KeyExtractorForLong { + private final LongBlock block; + + MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } + return nonNul(key, block.getLong(block.getFirstValueIndex(position) + block.getValueCount(position) - 1)); + } + } + + static class MinForUnordered extends KeyExtractorForLong { + private final LongBlock block; + + MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + long min = block.getLong(start); + for (int i = start + 1; i < end; i++) { + min = Math.min(min, block.getLong(i)); + } + return nonNul(key, min); + } + } + + static class MaxForUnordered extends KeyExtractorForLong { + private final LongBlock block; + + MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; + long max = block.getLong(start); + for (int i = start + 1; i < end; i++) { + max = Math.max(max, block.getLong(i)); + } + 
return nonNul(key, max); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java new file mode 100644 index 0000000000000..50cef0417dd45 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BooleanBlock; + +class ResultBuilderForBoolean implements ResultBuilder { + private final BooleanBlock.Builder builder; + + private final boolean inKey; + + /** + * The value previously set by {@link #decodeKey}. + */ + private boolean key; + + ResultBuilderForBoolean(TopNEncoder encoder, boolean inKey, int initialSize) { + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); + this.inKey = inKey; + this.builder = BooleanBlock.newBlockBuilder(initialSize); + } + + @Override + public void decodeKey(BytesRef keys) { + assert inKey; + key = TopNEncoder.DEFAULT_SORTABLE.decodeBoolean(keys); + } + + @Override + public void decodeValue(BytesRef values) { + int count = TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(values); + switch (count) { + case 0 -> { + builder.appendNull(); + } + case 1 -> builder.appendBoolean(inKey ? 
key : readValueFromValues(values)); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < count; i++) { + builder.appendBoolean(readValueFromValues(values)); + } + builder.endPositionEntry(); + } + } + } + + private boolean readValueFromValues(BytesRef values) { + return TopNEncoder.DEFAULT_UNSORTABLE.decodeBoolean(values); + } + + @Override + public BooleanBlock build() { + return builder.build(); + } + + @Override + public String toString() { + return "ResultBuilderForBoolean[inKey=" + inKey + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java new file mode 100644 index 0000000000000..55f324c931b67 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BytesRefBlock; + +class ResultBuilderForBytesRef implements ResultBuilder { + private final BytesRefBlock.Builder builder; + + private final boolean inKey; + + private final TopNEncoder encoder; + + private final BytesRef scratch = new BytesRef(); + + /** + * The value previously set by {@link #decodeKey}. 
+ */ + private BytesRef key; + + ResultBuilderForBytesRef(TopNEncoder encoder, boolean inKey, int initialSize) { + this.encoder = encoder; + this.inKey = inKey; + this.builder = BytesRefBlock.newBlockBuilder(initialSize); + } + + @Override + public void decodeKey(BytesRef keys) { + assert inKey; + key = encoder.toSortable().decodeBytesRef(keys, scratch); + } + + @Override + public void decodeValue(BytesRef values) { + int count = TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(values); + switch (count) { + case 0 -> { + builder.appendNull(); + } + case 1 -> builder.appendBytesRef(inKey ? key : readValueFromValues(values)); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < count; i++) { + builder.appendBytesRef(readValueFromValues(values)); + } + builder.endPositionEntry(); + } + } + } + + private BytesRef readValueFromValues(BytesRef values) { + return encoder.toUnsortable().decodeBytesRef(values, scratch); + } + + @Override + public BytesRefBlock build() { + return builder.build(); + } + + @Override + public String toString() { + return "ResultBuilderForBytesRef[inKey=" + inKey + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java new file mode 100644 index 0000000000000..ed4a9b45d90dc --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.DoubleBlock; + +class ResultBuilderForDouble implements ResultBuilder { + private final DoubleBlock.Builder builder; + + private final boolean inKey; + + /** + * The value previously set by {@link #decodeKey}. + */ + private double key; + + ResultBuilderForDouble(TopNEncoder encoder, boolean inKey, int initialSize) { + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); + this.inKey = inKey; + this.builder = DoubleBlock.newBlockBuilder(initialSize); + } + + @Override + public void decodeKey(BytesRef keys) { + assert inKey; + key = TopNEncoder.DEFAULT_SORTABLE.decodeDouble(keys); + } + + @Override + public void decodeValue(BytesRef values) { + int count = TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(values); + switch (count) { + case 0 -> { + builder.appendNull(); + } + case 1 -> builder.appendDouble(inKey ? key : readValueFromValues(values)); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < count; i++) { + builder.appendDouble(readValueFromValues(values)); + } + builder.endPositionEntry(); + } + } + } + + private double readValueFromValues(BytesRef values) { + return TopNEncoder.DEFAULT_UNSORTABLE.decodeDouble(values); + } + + @Override + public DoubleBlock build() { + return builder.build(); + } + + @Override + public String toString() { + return "ResultBuilderForDouble[inKey=" + inKey + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java new file mode 100644 index 0000000000000..2bcfc81107445 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.IntBlock; + +class ResultBuilderForInt implements ResultBuilder { + private final IntBlock.Builder builder; + + private final boolean inKey; + + /** + * The value previously set by {@link #decodeKey}. + */ + private int key; + + ResultBuilderForInt(TopNEncoder encoder, boolean inKey, int initialSize) { + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); + this.inKey = inKey; + this.builder = IntBlock.newBlockBuilder(initialSize); + } + + @Override + public void decodeKey(BytesRef keys) { + assert inKey; + key = TopNEncoder.DEFAULT_SORTABLE.decodeInt(keys); + } + + @Override + public void decodeValue(BytesRef values) { + int count = TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(values); + switch (count) { + case 0 -> { + builder.appendNull(); + } + case 1 -> builder.appendInt(inKey ? 
key : readValueFromValues(values)); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < count; i++) { + builder.appendInt(readValueFromValues(values)); + } + builder.endPositionEntry(); + } + } + } + + private int readValueFromValues(BytesRef values) { + return TopNEncoder.DEFAULT_UNSORTABLE.decodeInt(values); + } + + @Override + public IntBlock build() { + return builder.build(); + } + + @Override + public String toString() { + return "ResultBuilderForInt[inKey=" + inKey + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java new file mode 100644 index 0000000000000..3ada85bf9d5c9 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.LongBlock; + +class ResultBuilderForLong implements ResultBuilder { + private final LongBlock.Builder builder; + + private final boolean inKey; + + /** + * The value previously set by {@link #decodeKey}. 
+ */ + private long key; + + ResultBuilderForLong(TopNEncoder encoder, boolean inKey, int initialSize) { + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); + this.inKey = inKey; + this.builder = LongBlock.newBlockBuilder(initialSize); + } + + @Override + public void decodeKey(BytesRef keys) { + assert inKey; + key = TopNEncoder.DEFAULT_SORTABLE.decodeLong(keys); + } + + @Override + public void decodeValue(BytesRef values) { + int count = TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(values); + switch (count) { + case 0 -> { + builder.appendNull(); + } + case 1 -> builder.appendLong(inKey ? key : readValueFromValues(values)); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < count; i++) { + builder.appendLong(readValueFromValues(values)); + } + builder.endPositionEntry(); + } + } + } + + private long readValueFromValues(BytesRef values) { + return TopNEncoder.DEFAULT_UNSORTABLE.decodeLong(values); + } + + @Override + public LongBlock build() { + return builder.build(); + } + + @Override + public String toString() { + return "ResultBuilderForLong[inKey=" + inKey + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java new file mode 100644 index 0000000000000..0136c795746d0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; + +abstract class ValueExtractorForBoolean implements ValueExtractor { + static ValueExtractorForBoolean extractorFor(TopNEncoder encoder, boolean inKey, BooleanBlock block) { + BooleanVector vector = block.asVector(); + if (vector != null) { + return new ValueExtractorForBoolean.ForVector(encoder, inKey, vector); + } + return new ValueExtractorForBoolean.ForBlock(encoder, inKey, block); + } + + protected final boolean inKey; + + ValueExtractorForBoolean(TopNEncoder encoder, boolean inKey) { + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); + this.inKey = inKey; + } + + protected final void writeCount(BytesRefBuilder values, int count) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(count, values); + } + + protected final void actualWriteValue(BytesRefBuilder values, boolean value) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeBoolean(value, values); + } + + static class ForVector extends ValueExtractorForBoolean { + private final BooleanVector vector; + + ForVector(TopNEncoder encoder, boolean inKey, BooleanVector vector) { + super(encoder, inKey); + this.vector = vector; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + writeCount(values, 1); + if (inKey) { + // will read results from the key + return; + } + actualWriteValue(values, vector.getBoolean(position)); + } + } + + static class ForBlock extends ValueExtractorForBoolean { + private final BooleanBlock block; + + ForBlock(TopNEncoder encoder, boolean inKey, BooleanBlock block) { + super(encoder, inKey); + this.block = block; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + int size = block.getValueCount(position); + writeCount(values, size); + if (size == 1 && inKey) { + // Will read results from the key + 
return; + } + int start = block.getFirstValueIndex(position); + int end = start + size; + for (int i = start; i < end; i++) { + actualWriteValue(values, block.getBoolean(i)); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java new file mode 100644 index 0000000000000..97b2ce6da5e9b --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; + +abstract class ValueExtractorForBytesRef implements ValueExtractor { + static ValueExtractorForBytesRef extractorFor(TopNEncoder encoder, boolean inKey, BytesRefBlock block) { + BytesRefVector vector = block.asVector(); + if (vector != null) { + return new ValueExtractorForBytesRef.ForVector(encoder, inKey, vector); + } + return new ValueExtractorForBytesRef.ForBlock(encoder, inKey, block); + } + + private final TopNEncoder encoder; + + protected final BytesRef scratch = new BytesRef(); + + protected final boolean inKey; + + ValueExtractorForBytesRef(TopNEncoder encoder, boolean inKey) { + this.encoder = encoder; + this.inKey = inKey; + } + + protected final void writeCount(BytesRefBuilder values, int count) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(count, values); + } + + protected final void 
actualWriteValue(BytesRefBuilder values, BytesRef value) { + encoder.encodeBytesRef(value, values); + } + + static class ForVector extends ValueExtractorForBytesRef { + private final BytesRefVector vector; + + ForVector(TopNEncoder encoder, boolean inKey, BytesRefVector vector) { + super(encoder, inKey); + this.vector = vector; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + writeCount(values, 1); + if (inKey) { + // will read results from the key + return; + } + actualWriteValue(values, vector.getBytesRef(position, scratch)); + } + } + + static class ForBlock extends ValueExtractorForBytesRef { + private final BytesRefBlock block; + + ForBlock(TopNEncoder encoder, boolean inKey, BytesRefBlock block) { + super(encoder, inKey); + this.block = block; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + int size = block.getValueCount(position); + writeCount(values, size); + if (size == 1 && inKey) { + // Will read results from the key + return; + } + int start = block.getFirstValueIndex(position); + int end = start + size; + for (int i = start; i < end; i++) { + actualWriteValue(values, block.getBytesRef(i, scratch)); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java new file mode 100644 index 0000000000000..0bceeea462283 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; + +abstract class ValueExtractorForDouble implements ValueExtractor { + static ValueExtractorForDouble extractorFor(TopNEncoder encoder, boolean inKey, DoubleBlock block) { + DoubleVector vector = block.asVector(); + if (vector != null) { + return new ValueExtractorForDouble.ForVector(encoder, inKey, vector); + } + return new ValueExtractorForDouble.ForBlock(encoder, inKey, block); + } + + protected final boolean inKey; + + ValueExtractorForDouble(TopNEncoder encoder, boolean inKey) { + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); + this.inKey = inKey; + } + + protected final void writeCount(BytesRefBuilder values, int count) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(count, values); + } + + protected final void actualWriteValue(BytesRefBuilder values, double value) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeDouble(value, values); + } + + static class ForVector extends ValueExtractorForDouble { + private final DoubleVector vector; + + ForVector(TopNEncoder encoder, boolean inKey, DoubleVector vector) { + super(encoder, inKey); + this.vector = vector; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + writeCount(values, 1); + if (inKey) { + // will read results from the key + return; + } + actualWriteValue(values, vector.getDouble(position)); + } + } + + static class ForBlock extends ValueExtractorForDouble { + private final DoubleBlock block; + + ForBlock(TopNEncoder encoder, boolean inKey, DoubleBlock block) { + super(encoder, inKey); + this.block = block; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + int size = block.getValueCount(position); + writeCount(values, size); + if (size == 1 && inKey) { + // Will read results from the key + return; + } + int start 
= block.getFirstValueIndex(position); + int end = start + size; + for (int i = start; i < end; i++) { + actualWriteValue(values, block.getDouble(i)); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java new file mode 100644 index 0000000000000..28156ccb87cf7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; + +abstract class ValueExtractorForInt implements ValueExtractor { + static ValueExtractorForInt extractorFor(TopNEncoder encoder, boolean inKey, IntBlock block) { + IntVector vector = block.asVector(); + if (vector != null) { + return new ValueExtractorForInt.ForVector(encoder, inKey, vector); + } + return new ValueExtractorForInt.ForBlock(encoder, inKey, block); + } + + protected final boolean inKey; + + ValueExtractorForInt(TopNEncoder encoder, boolean inKey) { + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); + this.inKey = inKey; + } + + protected final void writeCount(BytesRefBuilder values, int count) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(count, values); + } + + protected final void actualWriteValue(BytesRefBuilder values, int value) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeInt(value, values); + } + + static class ForVector extends ValueExtractorForInt { + private 
final IntVector vector; + + ForVector(TopNEncoder encoder, boolean inKey, IntVector vector) { + super(encoder, inKey); + this.vector = vector; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + writeCount(values, 1); + if (inKey) { + // will read results from the key + return; + } + actualWriteValue(values, vector.getInt(position)); + } + } + + static class ForBlock extends ValueExtractorForInt { + private final IntBlock block; + + ForBlock(TopNEncoder encoder, boolean inKey, IntBlock block) { + super(encoder, inKey); + this.block = block; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + int size = block.getValueCount(position); + writeCount(values, size); + if (size == 1 && inKey) { + // Will read results from the key + return; + } + int start = block.getFirstValueIndex(position); + int end = start + size; + for (int i = start; i < end; i++) { + actualWriteValue(values, block.getInt(i)); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java new file mode 100644 index 0000000000000..aec9aaf11c919 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; + +abstract class ValueExtractorForLong implements ValueExtractor { + static ValueExtractorForLong extractorFor(TopNEncoder encoder, boolean inKey, LongBlock block) { + LongVector vector = block.asVector(); + if (vector != null) { + return new ValueExtractorForLong.ForVector(encoder, inKey, vector); + } + return new ValueExtractorForLong.ForBlock(encoder, inKey, block); + } + + protected final boolean inKey; + + ValueExtractorForLong(TopNEncoder encoder, boolean inKey) { + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); + this.inKey = inKey; + } + + protected final void writeCount(BytesRefBuilder values, int count) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(count, values); + } + + protected final void actualWriteValue(BytesRefBuilder values, long value) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeLong(value, values); + } + + static class ForVector extends ValueExtractorForLong { + private final LongVector vector; + + ForVector(TopNEncoder encoder, boolean inKey, LongVector vector) { + super(encoder, inKey); + this.vector = vector; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + writeCount(values, 1); + if (inKey) { + // will read results from the key + return; + } + actualWriteValue(values, vector.getLong(position)); + } + } + + static class ForBlock extends ValueExtractorForLong { + private final LongBlock block; + + ForBlock(TopNEncoder encoder, boolean inKey, LongBlock block) { + super(encoder, inKey); + this.block = block; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + int size = block.getValueCount(position); + writeCount(values, size); + if (size == 1 && inKey) { + // Will read results from the key + return; + } + int start = 
block.getFirstValueIndex(position); + int end = start + size; + for (int i = start; i < end; i++) { + actualWriteValue(values, block.getLong(i)); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/module-info.java b/x-pack/plugin/esql/compute/src/main/java/module-info.java index 280f2467a566c..69aa6f5bb217a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/module-info.java +++ b/x-pack/plugin/esql/compute/src/main/java/module-info.java @@ -23,4 +23,5 @@ exports org.elasticsearch.compute.operator; exports org.elasticsearch.compute.operator.exchange; exports org.elasticsearch.compute.aggregation.blockhash; + exports org.elasticsearch.compute.operator.topn; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index 8f2c2474d7f63..7a2ea0ddd69c5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -52,6 +52,19 @@ public Block filter(int... 
positions) { return new DocBlock(asVector().filter(positions)); } + @Override + public int hashCode() { + return vector.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof DocBlock == false) { + return false; + } + return vector.equals(((DocBlock) obj).vector); + } + @Override public long ramBytesUsed() { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(vector); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index cecae256e8974..eb67d89c3a869 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -11,6 +11,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.core.Releasables; +import java.util.Objects; + /** * {@link Vector} where each entry references a lucene document. 
*/ @@ -184,6 +186,20 @@ public boolean isConstant() { return shards.isConstant() && segments.isConstant() && docs.isConstant(); } + @Override + public int hashCode() { + return Objects.hash(shards, segments, docs); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof DocVector == false) { + return false; + } + DocVector other = (DocVector) obj; + return shards.equals(other.shards) && segments.equals(other.segments) && docs.equals(other.docs); + } + public static long ramBytesEstimated( IntVector shards, IntVector segments, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FixedLengthTopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FixedLengthTopNEncoder.java deleted file mode 100644 index 05629e93572bb..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FixedLengthTopNEncoder.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.operator; - -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; - -public class FixedLengthTopNEncoder implements TopNEncoder { - - @Override - public void encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder) { - bytesRefBuilder.append(value); - } - - @Override - public String toString() { - return "FixedLengthTopNEncoder"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TopNEncoder.java deleted file mode 100644 index f8fd7c0c10e5a..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TopNEncoder.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.operator; - -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.NumericUtils; - -/** - * Defines a default BytesRef encoding behavior for all block types, leaving text based types for concrete implementations. 
- */ -public interface TopNEncoder { - - default void encodeLong(long value, BytesRefBuilder bytesRefBuilder) { - bytesRefBuilder.grow(bytesRefBuilder.length() + Long.BYTES); - NumericUtils.longToSortableBytes(value, bytesRefBuilder.bytes(), bytesRefBuilder.length()); - bytesRefBuilder.setLength(bytesRefBuilder.length() + Long.BYTES); - } - - default void encodeInteger(int value, BytesRefBuilder bytesRefBuilder) { - bytesRefBuilder.grow(bytesRefBuilder.length() + Integer.BYTES); - NumericUtils.intToSortableBytes(value, bytesRefBuilder.bytes(), bytesRefBuilder.length()); - bytesRefBuilder.setLength(bytesRefBuilder.length() + Integer.BYTES); - } - - default void encodeDouble(double value, BytesRefBuilder bytesRefBuilder) { - bytesRefBuilder.grow(bytesRefBuilder.length() + Long.BYTES); - NumericUtils.longToSortableBytes(NumericUtils.doubleToSortableLong(value), bytesRefBuilder.bytes(), bytesRefBuilder.length()); - bytesRefBuilder.setLength(bytesRefBuilder.length() + Long.BYTES); - } - - default void encodeBoolean(boolean value, BytesRefBuilder bytesRefBuilder) { - var bytes = new byte[] { value ? (byte) 1 : (byte) 0 }; - bytesRefBuilder.append(bytes, 0, 1); - } - - void encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder); - -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TopNOperator.java deleted file mode 100644 index a7832f58727c1..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TopNOperator.java +++ /dev/null @@ -1,699 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.operator; - -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.PriorityQueue; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DocBlock; -import org.elasticsearch.compute.data.DocVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.Page; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.BitSet; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; - -/** - * An operator that sorts "rows" of values by encoding the values to sort on, as bytes (using BytesRef). Each data type is encoded - * in a specific way, defined by methods of a TopNEncoder. All the values used to sort a specific row (think of column/block 3 - * and column/block 6) are converted/encoded in a byte array and the concatenated bytes are all compared in bulk. - * For now, the only values that have a special "treatment" when it comes to encoding are the text-based ones (text, keyword, ip, version). - * For each "special" encoding there is should be new TopNEncoder implementation. See {@link UTF8TopNEncoder} for encoding regular - * "text" and "keyword" data types. See LocalExecutionPlanner for which data type uses which encoder. - * - * This Operator will not be able to sort binary values (encoded as BytesRef) because the bytes used as separator and "null"s can appear - * as valid bytes inside a binary value. 
- */ -public class TopNOperator implements Operator { - - private static final byte SEPARATOR = 0x0; // separator for values inside the BytesRef sorting key - private static final byte SMALL_NULL = 0x01; // "null" representation for "nulls first" - private static final byte BIG_NULL = 0x02; // "null" representation for "nulls last" - public static final TopNEncoder BYTESREF_FIXED_LENGTH_ENCODER = new FixedLengthTopNEncoder(); - public static final TopNEncoder BYTESREF_UTF8_ENCODER = new UTF8TopNEncoder(); - public static final TopNEncoder DEFAULT_ENCODER = new TopNEncoder() { - @Override - public void encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder) { - throw new IllegalStateException("Cannot find encoder for BytesRef value"); - } - - @Override - public String toString() { - return "DefaultEncoder"; - } - }; - - // enum to be extended in the future with other sorting modes (AVG average, for example) - private enum MvSortMode { - MIN, - MAX - } - - /** - * Internal row to be used in the PriorityQueue instead of the full blown Page. - * It mirrors somehow the Block build in the sense that it keeps around an array of offsets and a count of values (to account for - * multivalues) to reference each position in each block of the Page. - */ - static final class Row { - boolean[] booleans; - int[] ints; - long[] longs; - double[] doubles; - BytesRef[] byteRefs; - int[] docs; - boolean[] nullValues; - - int[] idToFirstValueIndex; // keeps the offset inside each of the arrays above where a specific block position starts from - ElementType[] idToType; - int[] numberOfValues; // keeps the count of values of each field in the specialized array - - BitSet blockIsUnordered; - BytesRefBuilder orderByCompositeKey = new BytesRefBuilder(); // BytesRef used to sort rows between each other - /** - * A true/false value (bit set/unset) for each byte in the BytesRef above corresponding to an asc/desc ordering. 
- * For ex, if a Long is represented as 8 bytes, each of these bytes will have the same value (set/unset) if the respective Long - * value is used for sorting ascending/descending. - */ - BitSet orderByCompositeKeyAscending; - - boolean isNull(int i) { - return nullValues[i]; - } - - boolean getBoolean(int i, int offset) { - return booleans[idToFirstValueIndex[i] + offset]; - } - - int getInt(int i, int offset) { - return ints[idToFirstValueIndex[i] + offset]; - } - - long getLong(int i, int offset) { - return longs[idToFirstValueIndex[i] + offset]; - } - - double getDouble(int i, int offset) { - return doubles[idToFirstValueIndex[i] + offset]; - } - - BytesRef getBytesRef(int i, int offset) { - return byteRefs[idToFirstValueIndex[i] + offset]; - } - - boolean blockIsUnordered(int i) { - return blockIsUnordered.get(i); - } - } - - static final class RowFactory { - - int size; - int nBooleans; - int nInts; - int nLongs; - int nDoubles; - int nByteRefs; - int nDocs; - - ElementType[] idToType; - - RowFactory(Page page) { - size = page.getBlockCount(); - idToType = new ElementType[size]; - for (int i = 0; i < size; i++) { - Block block = page.getBlock(i); - switch (block.elementType()) { - case LONG -> nLongs++; - case INT -> nInts++; - case DOUBLE -> nDoubles++; - case BYTES_REF -> nByteRefs++; - case BOOLEAN -> nBooleans++; - case DOC -> nDocs++; - case NULL -> { - } - case UNKNOWN -> { - assert false : "Must not occur here as TopN should never receive intermediate blocks"; - throw new UnsupportedOperationException("Block doesn't support retrieving elements"); - } - } - idToType[i] = block.elementType(); - } - } - - Row row(Page origin, int rowNum, Row spare, List sortOrders) { - Row result; - if (spare == null) { - result = new Row(); - result.nullValues = new boolean[size]; - result.booleans = new boolean[nBooleans]; - result.ints = new int[nInts]; - result.longs = new long[nLongs]; - result.doubles = new double[nDoubles]; - result.byteRefs = new 
BytesRef[nByteRefs]; - for (int i = 0; i < nByteRefs; i++) { - result.byteRefs[i] = new BytesRef(); - } - result.idToFirstValueIndex = new int[size]; - result.idToType = idToType; - result.docs = new int[nDocs * 3]; - result.numberOfValues = new int[size]; - result.orderByCompositeKeyAscending = new BitSet(); - } else { - result = spare; - // idToType has to be set because different pages could have different block types due to different mappings - result.idToType = idToType; - Arrays.fill(result.nullValues, false); - result.orderByCompositeKey = new BytesRefBuilder(); - result.orderByCompositeKeyAscending.clear(); - } - result.blockIsUnordered = new BitSet(size); - - int lastLongFirstValueIndex = 0; - int lastIntFirstValueIndex = 0; - int lastDoubleFirstValueIndex = 0; - int lastBytesRefFirstValueIndex = 0; - int lastBooleanFirstValueIndex = 0; - int lastDocFirstValueIndex = 0; - - for (int i = 0; i < size; i++) { - Block block = origin.getBlock(i); - if (block.mvOrdering() == Block.MvOrdering.UNORDERED) { - result.blockIsUnordered.set(i); - } - if (block.isNull(rowNum)) { - result.nullValues[i] = true; - } else { - int valuesCount = block.getValueCount(rowNum); - result.numberOfValues[i] = valuesCount; - switch (block.elementType()) { - case LONG -> { - int firstValueIndex = lastLongFirstValueIndex; - if (firstValueIndex + valuesCount > result.longs.length) { - result.longs = Arrays.copyOf(result.longs, firstValueIndex + valuesCount); - } - int start = block.getFirstValueIndex(rowNum); - int end = start + valuesCount; - for (int j = start, offset = 0; j < end; j++, offset++) { - result.longs[firstValueIndex + offset] = ((LongBlock) block).getLong(j); - } - result.idToFirstValueIndex[i] = firstValueIndex; - lastLongFirstValueIndex = firstValueIndex + valuesCount; - } - case INT -> { - int firstValueIndex = lastIntFirstValueIndex; - if (firstValueIndex + valuesCount > result.ints.length) { - result.ints = Arrays.copyOf(result.ints, firstValueIndex + valuesCount); - 
} - int start = block.getFirstValueIndex(rowNum); - int end = start + valuesCount; - for (int j = start, offset = 0; j < end; j++, offset++) { - result.ints[firstValueIndex + offset] = ((IntBlock) block).getInt(j); - } - result.idToFirstValueIndex[i] = firstValueIndex; - lastIntFirstValueIndex = firstValueIndex + valuesCount; - } - case DOUBLE -> { - int firstValueIndex = lastDoubleFirstValueIndex; - if (firstValueIndex + valuesCount > result.doubles.length) { - result.doubles = Arrays.copyOf(result.doubles, firstValueIndex + valuesCount); - } - int start = block.getFirstValueIndex(rowNum); - int end = start + valuesCount; - for (int j = start, offset = 0; j < end; j++, offset++) { - result.doubles[firstValueIndex + offset] = ((DoubleBlock) block).getDouble(j); - } - result.idToFirstValueIndex[i] = firstValueIndex; - lastDoubleFirstValueIndex = firstValueIndex + valuesCount; - } - case BYTES_REF -> { - int firstValueIndex = lastBytesRefFirstValueIndex; - if (firstValueIndex + valuesCount > result.byteRefs.length) { - int additionalSize = firstValueIndex + valuesCount - result.byteRefs.length; - result.byteRefs = Arrays.copyOf(result.byteRefs, firstValueIndex + valuesCount); - for (int j = 1; j <= additionalSize; j++) { - result.byteRefs[result.byteRefs.length - j] = new BytesRef(); - } - } - int start = block.getFirstValueIndex(rowNum); - int end = start + valuesCount; - for (int j = start, offset = 0; j < end; j++, offset++) { - BytesRef b = result.byteRefs[firstValueIndex + offset]; - b = ((BytesRefBlock) block).getBytesRef(j, b); - result.byteRefs[firstValueIndex + offset] = b; - } - result.idToFirstValueIndex[i] = firstValueIndex; - lastBytesRefFirstValueIndex = firstValueIndex + valuesCount; - } - case BOOLEAN -> { - int firstValueIndex = lastBooleanFirstValueIndex; - if (firstValueIndex + valuesCount > result.booleans.length) { - result.booleans = Arrays.copyOf(result.booleans, firstValueIndex + valuesCount); - } - int start = 
block.getFirstValueIndex(rowNum); - int end = start + valuesCount; - for (int j = start, offset = 0; j < end; j++, offset++) { - result.booleans[firstValueIndex + offset] = ((BooleanBlock) block).getBoolean(j); - } - result.idToFirstValueIndex[i] = firstValueIndex; - lastBooleanFirstValueIndex = firstValueIndex + valuesCount; - } - case DOC -> { - int firstValueIndex = lastDocFirstValueIndex; - if (firstValueIndex + 3 > result.docs.length) { - result.docs = Arrays.copyOf(result.docs, firstValueIndex + 3); - } - DocVector doc = ((DocBlock) block).asVector(); - result.docs[firstValueIndex] = doc.shards().getInt(rowNum); - result.docs[firstValueIndex + 1] = doc.segments().getInt(rowNum); - result.docs[firstValueIndex + 2] = doc.docs().getInt(rowNum); - - result.idToFirstValueIndex[i] = firstValueIndex; - lastDocFirstValueIndex = firstValueIndex + 3; - } - case NULL -> { - assert false : "Must not occur here as we check nulls above already"; - throw new UnsupportedOperationException("Block of nulls doesn't support comparison"); - } - default -> { - assert false : "Must not occur here as TopN should never receive intermediate blocks"; - throw new UnsupportedOperationException("Block doesn't support retrieving elements"); - } - - } - } - } - - int orderByCompositeKeyCurrentPosition = 0; - for (SortOrder so : sortOrders) { - byte nul, nonNul; - if (so.nullsFirst) { - nul = so.asc ? SMALL_NULL : BIG_NULL; - nonNul = so.asc ? BIG_NULL : SMALL_NULL; - } else { - nul = so.asc ? BIG_NULL : SMALL_NULL; - nonNul = so.asc ? SMALL_NULL : BIG_NULL; - } - - MvSortMode sortMode = so.asc ? MvSortMode.MIN : MvSortMode.MAX; - int mvOffset = result.blockIsUnordered(so.channel) - ? -1 - : (sortMode == MvSortMode.MIN ? 
0 : result.numberOfValues[so.channel] - 1); - int valueAsBytesSize; - - if (result.isNull(so.channel)) { - result.orderByCompositeKey.append(nul); - valueAsBytesSize = 0; - } else { - result.orderByCompositeKey.append(nonNul); - switch (result.idToType[so.channel]) { - case LONG -> { - long rowValue; - if (mvOffset >= 0) { - rowValue = result.getLong(so.channel, mvOffset); - } else { - rowValue = result.getLong(so.channel, 0); - for (int j = 1; j < result.numberOfValues[so.channel]; j++) { - long value = result.getLong(so.channel, j); - if (sortMode == MvSortMode.MIN) { - rowValue = Math.min(value, rowValue); - } else if (sortMode == MvSortMode.MAX) { - rowValue = Math.max(value, rowValue); - } - } - } - so.encoder.encodeLong(rowValue, result.orderByCompositeKey); - valueAsBytesSize = Long.BYTES; - } - case INT -> { - int rowValue; - if (mvOffset >= 0) { - rowValue = result.getInt(so.channel, mvOffset); - } else { - rowValue = result.getInt(so.channel, 0); - for (int j = 1; j < result.numberOfValues[so.channel]; j++) { - int value = result.getInt(so.channel, j); - if (sortMode == MvSortMode.MIN) { - rowValue = Math.min(value, rowValue); - } else if (sortMode == MvSortMode.MAX) { - rowValue = Math.max(value, rowValue); - } - } - } - so.encoder.encodeInteger(rowValue, result.orderByCompositeKey); - valueAsBytesSize = Integer.BYTES; - } - case DOUBLE -> { - double rowValue; - if (mvOffset >= 0) { - rowValue = result.getDouble(so.channel, mvOffset); - } else { - rowValue = result.getDouble(so.channel, 0); - for (int j = 1; j < result.numberOfValues[so.channel]; j++) { - double value = result.getDouble(so.channel, j); - if (sortMode == MvSortMode.MIN) { - rowValue = Math.min(value, rowValue); - } else if (sortMode == MvSortMode.MAX) { - rowValue = Math.max(value, rowValue); - } - } - } - so.encoder.encodeDouble(rowValue, result.orderByCompositeKey); - valueAsBytesSize = Long.BYTES; - } - case BYTES_REF -> { - BytesRef rowValue; - if (mvOffset >= 0) { - rowValue = 
result.getBytesRef(so.channel, mvOffset); - } else { - rowValue = result.getBytesRef(so.channel, 0); - for (int j = 1; j < result.numberOfValues[so.channel]; j++) { - BytesRef value = result.getBytesRef(so.channel, j); - int compare = value.compareTo(rowValue); - if (sortMode == MvSortMode.MIN && compare < 0 || sortMode == MvSortMode.MAX && compare > 0) { - rowValue = value; - } - } - } - so.encoder.encodeBytesRef(rowValue, result.orderByCompositeKey); - valueAsBytesSize = rowValue.length; - } - case BOOLEAN -> { - boolean rowValue; - if (mvOffset >= 0) { - rowValue = result.getBoolean(so.channel, mvOffset); - } else { - rowValue = result.getBoolean(so.channel, 0); - for (int j = 1; j < result.numberOfValues[so.channel] - && (sortMode == MvSortMode.MIN && rowValue || sortMode == MvSortMode.MAX && rowValue == false); j++) { - boolean value = result.getBoolean(so.channel, j); - if (sortMode == MvSortMode.MIN && value == false) { - rowValue = false; - } else if (sortMode == MvSortMode.MAX && value) { - rowValue = true; - } - } - } - so.encoder.encodeBoolean(rowValue, result.orderByCompositeKey); - valueAsBytesSize = 1; - } - default -> { - assert false : "Must not occur here as TopN should never receive intermediate blocks"; - throw new UnsupportedOperationException("Block doesn't support retrieving elements"); - } - } - } - result.orderByCompositeKeyAscending.set( - orderByCompositeKeyCurrentPosition, - valueAsBytesSize + orderByCompositeKeyCurrentPosition + 2, - so.asc - ); - orderByCompositeKeyCurrentPosition += valueAsBytesSize + 2; - result.orderByCompositeKey.append(SEPARATOR); - } - - return result; - } - } - - public record SortOrder(int channel, boolean asc, boolean nullsFirst, TopNEncoder encoder) { - - public SortOrder(int channel, boolean asc, boolean nullsFirst) { - this(channel, asc, nullsFirst, DEFAULT_ENCODER); - } - - @Override - public String toString() { - return "SortOrder[channel=" - + this.channel - + ", asc=" - + this.asc - + ", nullsFirst=" - + 
this.nullsFirst - + ", encoder=" - + this.encoder - + "]"; - } - } - - public record TopNOperatorFactory(int topCount, List sortOrders, int maxPageSize) implements OperatorFactory { - - @Override - public Operator get(DriverContext driverContext) { - return new TopNOperator(topCount, sortOrders, maxPageSize); - } - - @Override - public String describe() { - return "TopNOperator[count = " + topCount + ", sortOrders = " + sortOrders + "]"; - } - } - - private final PriorityQueue inputQueue; - - private final int maxPageSize; - private RowFactory rowFactory; - - private final List sortOrders; - - // these will be inferred at runtime: one input page might not contain all the information needed - // eg. it could be missing some fields in the mapping, so it could have NULL blocks as placeholders - private ElementType[] outputTypes; - - private Iterator output; - - public TopNOperator(int topCount, List sortOrders, int maxPageSize) { - this.maxPageSize = maxPageSize; - this.sortOrders = sortOrders; - this.inputQueue = new PriorityQueue<>(topCount) { - @Override - protected boolean lessThan(Row r1, Row r2) { - return compareRows(r1, r2) < 0; - } - - @Override - public String toString() { - if (sortOrders.size() == 1) { - SortOrder order = sortOrders.get(0); - return "count = " + size() + "/" + topCount + ", sortOrder = " + order; - } else { - return "count = " + size() + "/" + topCount + ", sortOrders = " + sortOrders; - } - } - }; - } - - static int compareRows(Row r1, Row r2) { - // This is simliar to r1.orderByCompositeKey.compareTo(r2.orderByCompositeKey) but stopping somewhere in the middle so that - // we check the byte that mismatched - BytesRef br1 = r1.orderByCompositeKey.get(); - BytesRef br2 = r2.orderByCompositeKey.get(); - int mismatchedByteIndex = Arrays.mismatch( - br1.bytes, - br1.offset, - br1.offset + br1.length, - br2.bytes, - br2.offset, - br2.offset + br2.length - ); - if (mismatchedByteIndex < 0) { - // the two rows are equal - return 0; - } - int 
length = Math.min(br1.length, br2.length); - // one value is the prefix of the other - if (mismatchedByteIndex == length) { - // the value with the greater length is considered greater than the other - if (length == br1.length) {// first row is less than the second row - return r2.orderByCompositeKeyAscending.get(length) ? 1 : -1; - } else {// second row is less than the first row - return r1.orderByCompositeKeyAscending.get(length) ? -1 : 1; - } - } else { - // compare the byte that mismatched accounting for that respective byte asc/desc ordering - int c = Byte.compareUnsigned( - r1.orderByCompositeKey.bytes()[br1.offset + mismatchedByteIndex], - r2.orderByCompositeKey.bytes()[br2.offset + mismatchedByteIndex] - ); - return r1.orderByCompositeKeyAscending.get(mismatchedByteIndex) ? -c : c; - } - } - - @Override - public boolean needsInput() { - return output == null; - } - - @Override - public void addInput(Page page) { - // rebuild for every page, since blocks can originate from different indices, with different mapping - rowFactory = new RowFactory(page); - if (outputTypes == null) { - outputTypes = Arrays.copyOf(rowFactory.idToType, rowFactory.idToType.length); - } else { - for (int i = 0; i < rowFactory.idToType.length; i++) { - if (outputTypes[i] == ElementType.NULL) { // the type could just be missing in the previous mappings - outputTypes[i] = rowFactory.idToType[i]; - } - } - } - - Row removed = null; - for (int i = 0; i < page.getPositionCount(); i++) { - Row x = rowFactory.row(page, i, removed, sortOrders); - removed = inputQueue.insertWithOverflow(x); - } - } - - @Override - public void finish() { - if (output == null) { - output = toPages(); - } - } - - private Iterator toPages() { - if (inputQueue.size() == 0) { - return Collections.emptyIterator(); - } - List list = new ArrayList<>(inputQueue.size()); - while (inputQueue.size() > 0) { - list.add(inputQueue.pop()); - } - Collections.reverse(list); - - List result = new ArrayList<>(); - Block.Builder[] 
builders = null; - int p = 0; - int size = 0; - for (int i = 0; i < list.size(); i++) { - if (builders == null) { - size = Math.min(maxPageSize, list.size() - i); - builders = new Block.Builder[rowFactory.size]; - for (int b = 0; b < builders.length; b++) { - builders[b] = outputTypes[b].newBlockBuilder(size); - } - p = 0; - } - - Row row = list.get(i); - for (int b = 0; b < builders.length; b++) { - if (row.isNull(b)) { - builders[b].appendNull(); - continue; - } - switch (outputTypes[b]) { - case BOOLEAN -> { - if (row.numberOfValues[b] > 1) { - ((BooleanBlock.Builder) builders[b]).beginPositionEntry(); - for (int j = 0; j < row.numberOfValues[b]; j++) { - ((BooleanBlock.Builder) builders[b]).appendBoolean(row.getBoolean(b, j)); - } - ((BooleanBlock.Builder) builders[b]).endPositionEntry(); - } else { - ((BooleanBlock.Builder) builders[b]).appendBoolean(row.getBoolean(b, 0)); - } - } - case INT -> { - if (row.numberOfValues[b] > 1) { - ((IntBlock.Builder) builders[b]).beginPositionEntry(); - for (int j = 0; j < row.numberOfValues[b]; j++) { - ((IntBlock.Builder) builders[b]).appendInt(row.getInt(b, j)); - } - ((IntBlock.Builder) builders[b]).endPositionEntry(); - } else { - ((IntBlock.Builder) builders[b]).appendInt(row.getInt(b, 0)); - } - } - case LONG -> { - if (row.numberOfValues[b] > 1) { - ((LongBlock.Builder) builders[b]).beginPositionEntry(); - for (int j = 0; j < row.numberOfValues[b]; j++) { - ((LongBlock.Builder) builders[b]).appendLong(row.getLong(b, j)); - } - ((LongBlock.Builder) builders[b]).endPositionEntry(); - } else { - ((LongBlock.Builder) builders[b]).appendLong(row.getLong(b, 0)); - } - } - case DOUBLE -> { - if (row.numberOfValues[b] > 1) { - ((DoubleBlock.Builder) builders[b]).beginPositionEntry(); - for (int j = 0; j < row.numberOfValues[b]; j++) { - ((DoubleBlock.Builder) builders[b]).appendDouble(row.getDouble(b, j)); - } - ((DoubleBlock.Builder) builders[b]).endPositionEntry(); - } else { - ((DoubleBlock.Builder) 
builders[b]).appendDouble(row.getDouble(b, 0)); - } - } - case BYTES_REF -> { - if (row.numberOfValues[b] > 1) { - ((BytesRefBlock.Builder) builders[b]).beginPositionEntry(); - for (int j = 0; j < row.numberOfValues[b]; j++) { - ((BytesRefBlock.Builder) builders[b]).appendBytesRef(row.getBytesRef(b, j)); - } - ((BytesRefBlock.Builder) builders[b]).endPositionEntry(); - } else { - ((BytesRefBlock.Builder) builders[b]).appendBytesRef(row.getBytesRef(b, 0)); - } - } - case DOC -> { - int dp = row.idToFirstValueIndex[b]; - int shard = row.docs[dp++]; - int segment = row.docs[dp++]; - int doc = row.docs[dp]; - ((DocBlock.Builder) builders[b]).appendShard(shard).appendSegment(segment).appendDoc(doc); - } - case NULL -> builders[b].appendNull(); - default -> throw new IllegalStateException("unsupported type [" + rowFactory.idToType[b] + "]"); - } - } - - p++; - if (p == size) { - result.add(new Page(Arrays.stream(builders).map(Block.Builder::build).toArray(Block[]::new))); - builders = null; - } - } - assert builders == null; - return result.iterator(); - } - - @Override - public boolean isFinished() { - return output != null && output.hasNext() == false; - } - - @Override - public Page getOutput() { - if (output != null && output.hasNext()) { - return output.next(); - } - return null; - } - - @Override - public void close() { - - } - - @Override - public String toString() { - return "TopNOperator[" + inputQueue + "]"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/UTF8TopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/UTF8TopNEncoder.java deleted file mode 100644 index 9cab7c9e000aa..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/UTF8TopNEncoder.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.operator; - -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; - -public class UTF8TopNEncoder implements TopNEncoder { - - private static final int CONTINUATION_BYTE = 0b1000_0000; - - @Override - public void encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder) { - // add one bit to every byte so that there are no "0" bytes in the provided bytes. The only "0" bytes are - // those defined as separators - int end = value.offset + value.length; - for (int i = value.offset; i < end; i++) { - byte b = value.bytes[i]; - if ((b & CONTINUATION_BYTE) == 0) { - b++; - } - bytesRefBuilder.append(b); - } - } - - @Override - public String toString() { - return "UTF8TopNEncoder"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/DefaultSortableTopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/DefaultSortableTopNEncoder.java new file mode 100644 index 0000000000000..3b3ba69407065 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/DefaultSortableTopNEncoder.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; + +class DefaultSortableTopNEncoder extends SortableTopNEncoder { + @Override + public int encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder) { + throw new IllegalStateException("Cannot find encoder for BytesRef value"); + } + + @Override + public BytesRef decodeBytesRef(BytesRef bytes, BytesRef scratch) { + throw new IllegalStateException("Cannot find encoder for BytesRef value"); + } + + @Override + public String toString() { + return "DefaultUnsortable"; + } + + @Override + public TopNEncoder toSortable() { + return this; + } + + @Override + public TopNEncoder toUnsortable() { + return TopNEncoder.DEFAULT_UNSORTABLE; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/DefaultUnsortableTopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/DefaultUnsortableTopNEncoder.java new file mode 100644 index 0000000000000..668353b86519a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/DefaultUnsortableTopNEncoder.java @@ -0,0 +1,181 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; + +import java.lang.invoke.MethodHandles; +import java.lang.invoke.VarHandle; +import java.nio.ByteOrder; + +/** + * A {@link TopNEncoder} that doesn't encode values so they are sortable but is + * capable of encoding any values. 
+ */ +final class DefaultUnsortableTopNEncoder implements TopNEncoder { + public static final VarHandle LONG = MethodHandles.byteArrayViewVarHandle(long[].class, ByteOrder.nativeOrder()); + public static final VarHandle INT = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.nativeOrder()); + public static final VarHandle DOUBLE = MethodHandles.byteArrayViewVarHandle(double[].class, ByteOrder.nativeOrder()); + + @Override + public void encodeLong(long value, BytesRefBuilder bytesRefBuilder) { + bytesRefBuilder.grow(bytesRefBuilder.length() + Long.BYTES); + LONG.set(bytesRefBuilder.bytes(), bytesRefBuilder.length(), value); + bytesRefBuilder.setLength(bytesRefBuilder.length() + Long.BYTES); + } + + @Override + public long decodeLong(BytesRef bytes) { + if (bytes.length < Long.BYTES) { + throw new IllegalArgumentException("not enough bytes"); + } + long v = (long) LONG.get(bytes.bytes, bytes.offset); + bytes.offset += Long.BYTES; + bytes.length -= Long.BYTES; + return v; + } + + /** + * Writes an int in a variable-length format. Writes between one and + * five bytes. Smaller values take fewer bytes. Negative numbers + * will always use all 5 bytes. + */ + public void encodeVInt(int value, BytesRefBuilder bytesRefBuilder) { + while ((value & ~0x7F) != 0) { + bytesRefBuilder.append(((byte) ((value & 0x7f) | 0x80))); + value >>>= 7; + } + bytesRefBuilder.append((byte) value); + } + + /** + * Reads an int stored in variable-length format. Reads between one and + * five bytes. Smaller values take fewer bytes. Negative numbers + * will always use all 5 bytes. + */ + public int decodeVInt(BytesRef bytes) { + /* + * The loop for this is unrolled because we unrolled the loop in StreamInput. + * I presume it's a decent choice here because it was a good choice there. 
+ */ + byte b = bytes.bytes[bytes.offset]; + if (b >= 0) { + bytes.offset += 1; + bytes.length -= 1; + return b; + } + int i = b & 0x7F; + b = bytes.bytes[bytes.offset + 1]; + i |= (b & 0x7F) << 7; + if (b >= 0) { + bytes.offset += 2; + bytes.length -= 2; + return i; + } + b = bytes.bytes[bytes.offset + 2]; + i |= (b & 0x7F) << 14; + if (b >= 0) { + bytes.offset += 3; + bytes.length -= 3; + return i; + } + b = bytes.bytes[bytes.offset + 3]; + i |= (b & 0x7F) << 21; + if (b >= 0) { + bytes.offset += 4; + bytes.length -= 4; + return i; + } + b = bytes.bytes[bytes.offset + 4]; + i |= (b & 0x0F) << 28; + if ((b & 0xF0) != 0) { + throw new IllegalStateException("Invalid last byte for a vint [" + Integer.toHexString(b) + "]"); + } + bytes.offset += 5; + bytes.length -= 5; + return i; + } + + @Override + public void encodeInt(int value, BytesRefBuilder bytesRefBuilder) { + bytesRefBuilder.grow(bytesRefBuilder.length() + Integer.BYTES); + INT.set(bytesRefBuilder.bytes(), bytesRefBuilder.length(), value); + bytesRefBuilder.setLength(bytesRefBuilder.length() + Integer.BYTES); + } + + @Override + public int decodeInt(BytesRef bytes) { + if (bytes.length < Integer.BYTES) { + throw new IllegalArgumentException("not enough bytes"); + } + int v = (int) INT.get(bytes.bytes, bytes.offset); + bytes.offset += Integer.BYTES; + bytes.length -= Integer.BYTES; + return v; + } + + @Override + public void encodeDouble(double value, BytesRefBuilder bytesRefBuilder) { + bytesRefBuilder.grow(bytesRefBuilder.length() + Double.BYTES); + DOUBLE.set(bytesRefBuilder.bytes(), bytesRefBuilder.length(), value); + bytesRefBuilder.setLength(bytesRefBuilder.length() + Long.BYTES); + } + + @Override + public double decodeDouble(BytesRef bytes) { + if (bytes.length < Double.BYTES) { + throw new IllegalArgumentException("not enough bytes"); + } + double v = (double) DOUBLE.get(bytes.bytes, bytes.offset); + bytes.offset += Double.BYTES; + bytes.length -= Double.BYTES; + return v; + } + + @Override + public 
void encodeBoolean(boolean value, BytesRefBuilder bytesRefBuilder) { + bytesRefBuilder.append(value ? (byte) 1 : (byte) 0); + } + + @Override + public boolean decodeBoolean(BytesRef bytes) { + if (bytes.length < Byte.BYTES) { + throw new IllegalArgumentException("not enough bytes"); + } + boolean v = bytes.bytes[bytes.offset] == 1; + bytes.offset += Byte.BYTES; + bytes.length -= Byte.BYTES; + return v; + } + + @Override + public int encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder) { + throw new UnsupportedOperationException(); + } + + @Override + public BytesRef decodeBytesRef(BytesRef bytes, BytesRef scratch) { + throw new UnsupportedOperationException(); + } + + @Override + public TopNEncoder toSortable() { + return TopNEncoder.DEFAULT_SORTABLE; + } + + @Override + public TopNEncoder toUnsortable() { + return this; + } + + @Override + public String toString() { + return "DefaultUnsortable"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/FixedLengthTopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/FixedLengthTopNEncoder.java new file mode 100644 index 0000000000000..fbca1080c871c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/FixedLengthTopNEncoder.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; + +class FixedLengthTopNEncoder extends SortableTopNEncoder { + private final int length; + + FixedLengthTopNEncoder(int length) { + this.length = length; + } + + @Override + public int encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder) { + if (value.length != length) { + throw new IllegalArgumentException("expected exactly [" + length + "] bytes but got [" + value.length + "]"); + } + bytesRefBuilder.append(value); + return length; + } + + @Override + public BytesRef decodeBytesRef(BytesRef bytes, BytesRef scratch) { + if (bytes.length < length) { + throw new IllegalArgumentException("expected [" + length + "] bytes but only [" + bytes.length + "] remain"); + } + scratch.bytes = bytes.bytes; + scratch.offset = bytes.offset; + scratch.length = length; + bytes.offset += length; + bytes.length -= length; + return scratch; + } + + @Override + public String toString() { + return "FixedLengthTopNEncoder[" + length + "]"; + } + + @Override + public TopNEncoder toSortable() { + return this; + } + + @Override + public TopNEncoder toUnsortable() { + return this; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/KeyExtractor.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/KeyExtractor.java new file mode 100644 index 0000000000000..19daa1aba8d03 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/KeyExtractor.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; + +/** + * Extracts keys into a {@link BytesRefBuilder}. + */ +interface KeyExtractor { + int writeKey(BytesRefBuilder key, int position); + + static KeyExtractor extractorFor(ElementType elementType, TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, Block block) { + if (false == (elementType == block.elementType() || ElementType.NULL == block.elementType())) { + throw new IllegalArgumentException("Expected [" + elementType + "] but was [" + block.elementType() + "]"); + } + return switch (block.elementType()) { + case BOOLEAN -> KeyExtractorForBoolean.extractorFor(encoder, ascending, nul, nonNul, (BooleanBlock) block); + case BYTES_REF -> KeyExtractorForBytesRef.extractorFor(encoder, ascending, nul, nonNul, (BytesRefBlock) block); + case INT -> KeyExtractorForInt.extractorFor(encoder, ascending, nul, nonNul, (IntBlock) block); + case LONG -> KeyExtractorForLong.extractorFor(encoder, ascending, nul, nonNul, (LongBlock) block); + case DOUBLE -> KeyExtractorForDouble.extractorFor(encoder, ascending, nul, nonNul, (DoubleBlock) block); + case NULL -> new KeyExtractorForNull(nul); + default -> { + assert false : "No key extractor for [" + block.elementType() + "]"; + throw new UnsupportedOperationException("No key extractor for [" + block.elementType() + "]"); + } + }; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/KeyExtractorForNull.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/KeyExtractorForNull.java new 
file mode 100644 index 0000000000000..cea218f6036de --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/KeyExtractorForNull.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; + +class KeyExtractorForNull implements KeyExtractor { + private final byte nul; + + KeyExtractorForNull(byte nul) { + this.nul = nul; + } + + @Override + public int writeKey(BytesRefBuilder values, int position) { + values.append(nul); + return 1; + } + + @Override + public String toString() { + return "KeyExtractorForNull(" + Integer.toHexString(nul & 0xff) + ")"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java new file mode 100644 index 0000000000000..b8a41a3ee343d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; + +/** + * Builds {@link Block}s from keys and values encoded into {@link BytesRef}s. 
+ */ +interface ResultBuilder { + /** + * Called for each sort key before {@link #decodeValue} to consume the sort key and + * store the value of the key so that {@link #decodeValue} can use it to reconstruct + * the value. This will only be called if the value is part of the key. + */ + void decodeKey(BytesRef keys); + + /** + * Called once per row to decode the value and write to the internal {@link Block.Builder}. + * If the value is part of the key then {@link #decodeKey} will be called first and + * implementations can store keys in that method and reuse them in this method. Most + * implementations don't write single valued fields that appear in the key and instead + * use the value from {@link #decodeKey}. + */ + void decodeValue(BytesRef values); + + /** + * Build the result block. + */ + Block build(); + + static ResultBuilder resultBuilderFor(ElementType elementType, TopNEncoder encoder, boolean inKey, int positions) { + return switch (elementType) { + case BOOLEAN -> new ResultBuilderForBoolean(encoder, inKey, positions); + case BYTES_REF -> new ResultBuilderForBytesRef(encoder, inKey, positions); + case INT -> new ResultBuilderForInt(encoder, inKey, positions); + case LONG -> new ResultBuilderForLong(encoder, inKey, positions); + case DOUBLE -> new ResultBuilderForDouble(encoder, inKey, positions); + case NULL -> new ResultBuilderForNull(); + case DOC -> new ResultBuilderForDoc(positions); + default -> { + assert false : "Result builder for [" + elementType + "]"; + throw new UnsupportedOperationException("Result builder for [" + elementType + "]"); + } + }; + } + +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java new file mode 100644 index 0000000000000..a825b7d160551 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.IntArrayVector; + +class ResultBuilderForDoc implements ResultBuilder { + private final int[] shards; + private final int[] segments; + private final int[] docs; + private int position; + + ResultBuilderForDoc(int positions) { + this.shards = new int[positions]; + this.segments = new int[positions]; + this.docs = new int[positions]; + } + + @Override + public void decodeKey(BytesRef keys) { + throw new AssertionError("_doc can't be a key"); + } + + @Override + public void decodeValue(BytesRef values) { + shards[position] = TopNEncoder.DEFAULT_UNSORTABLE.decodeInt(values); + segments[position] = TopNEncoder.DEFAULT_UNSORTABLE.decodeInt(values); + docs[position] = TopNEncoder.DEFAULT_UNSORTABLE.decodeInt(values); + position++; + } + + @Override + public Block build() { + return new DocVector( + new IntArrayVector(shards, position), + new IntArrayVector(segments, position), + new IntArrayVector(docs, position), + null + ).asBlock(); + } + + @Override + public String toString() { + return "ValueExtractorForDoc"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java new file mode 100644 index 0000000000000..05b9ba2a07658 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; + +public class ResultBuilderForNull implements ResultBuilder { + private int positions; + + @Override + public void decodeKey(BytesRef keys) { + throw new AssertionError("somehow got a value for a null key"); + } + + @Override + public void decodeValue(BytesRef values) { + int size = TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(values); + if (size != 0) { + throw new IllegalArgumentException("null columns should always have 0 entries"); + } + positions++; + } + + @Override + public Block build() { + return Block.constantNullBlock(positions); + } + + @Override + public String toString() { + return "ValueExtractorForNull"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/SortableTopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/SortableTopNEncoder.java new file mode 100644 index 0000000000000..21b8b9d3d2e36 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/SortableTopNEncoder.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.NumericUtils; + +/** + * A {@link TopNEncoder} that encodes values to byte arrays that may be sorted directly. + */ +public abstract class SortableTopNEncoder implements TopNEncoder { + @Override + public final void encodeLong(long value, BytesRefBuilder bytesRefBuilder) { + bytesRefBuilder.grow(bytesRefBuilder.length() + Long.BYTES); + NumericUtils.longToSortableBytes(value, bytesRefBuilder.bytes(), bytesRefBuilder.length()); + bytesRefBuilder.setLength(bytesRefBuilder.length() + Long.BYTES); + } + + @Override + public final long decodeLong(BytesRef bytes) { + if (bytes.length < Long.BYTES) { + throw new IllegalArgumentException("not enough bytes"); + } + long v = NumericUtils.sortableBytesToLong(bytes.bytes, bytes.offset); + bytes.offset += Long.BYTES; + bytes.length -= Long.BYTES; + return v; + } + + @Override + public final void encodeInt(int value, BytesRefBuilder bytesRefBuilder) { + bytesRefBuilder.grow(bytesRefBuilder.length() + Integer.BYTES); + NumericUtils.intToSortableBytes(value, bytesRefBuilder.bytes(), bytesRefBuilder.length()); + bytesRefBuilder.setLength(bytesRefBuilder.length() + Integer.BYTES); + } + + @Override + public final int decodeInt(BytesRef bytes) { + if (bytes.length < Integer.BYTES) { + throw new IllegalArgumentException("not enough bytes"); + } + int v = NumericUtils.sortableBytesToInt(bytes.bytes, bytes.offset); + bytes.offset += Integer.BYTES; + bytes.length -= Integer.BYTES; + return v; + } + + @Override + public final void encodeDouble(double value, BytesRefBuilder bytesRefBuilder) { + bytesRefBuilder.grow(bytesRefBuilder.length() + Long.BYTES); + NumericUtils.longToSortableBytes(NumericUtils.doubleToSortableLong(value), bytesRefBuilder.bytes(), bytesRefBuilder.length()); + bytesRefBuilder.setLength(bytesRefBuilder.length() + Long.BYTES); + } + + 
@Override + public final double decodeDouble(BytesRef bytes) { + if (bytes.length < Double.BYTES) { + throw new IllegalArgumentException("not enough bytes"); + } + double v = NumericUtils.sortableLongToDouble(NumericUtils.sortableBytesToLong(bytes.bytes, bytes.offset)); + bytes.offset += Double.BYTES; + bytes.length -= Double.BYTES; + return v; + } + + @Override + public final void encodeBoolean(boolean value, BytesRefBuilder bytesRefBuilder) { + bytesRefBuilder.append(value ? (byte) 1 : (byte) 0); + } + + @Override + public final boolean decodeBoolean(BytesRef bytes) { + if (bytes.length < Byte.BYTES) { + throw new IllegalArgumentException("not enough bytes"); + } + boolean v = bytes.bytes[bytes.offset] == 1; + bytes.offset += Byte.BYTES; + bytes.length -= Byte.BYTES; + return v; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNEncoder.java new file mode 100644 index 0000000000000..220a31a8fdac2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNEncoder.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; + +/** + * Encodes values for {@link TopNOperator}. Some encoders encode values so sorting + * the bytes will sort the values. This is called "sortable" and you can always + * go from any {@link TopNEncoder} to a "sortable" version of it with {@link #toSortable()}. 
+ * If you don't need the bytes to be sortable you can get an "unsortable" encoder + * with {@link #toUnsortable()}. + */ +public interface TopNEncoder { + /** + * An encoder that encodes values such that sorting the bytes sorts the values. + */ + DefaultSortableTopNEncoder DEFAULT_SORTABLE = new DefaultSortableTopNEncoder(); + /** + * An encoder that encodes values as compactly as possible without making the + * encoded bytes sortable. + */ + DefaultUnsortableTopNEncoder DEFAULT_UNSORTABLE = new DefaultUnsortableTopNEncoder(); + /** + * An encoder for IP addresses. + */ + FixedLengthTopNEncoder IP = new FixedLengthTopNEncoder(InetAddressPoint.BYTES); + /** + * An encoder for UTF-8 text. + */ + UTF8TopNEncoder UTF8 = new UTF8TopNEncoder(); + /** + * An encoder for semver versions. + */ + VersionTopNEncoder VERSION = new VersionTopNEncoder(); + + void encodeLong(long value, BytesRefBuilder bytesRefBuilder); + + long decodeLong(BytesRef bytes); + + void encodeInt(int value, BytesRefBuilder bytesRefBuilder); + + int decodeInt(BytesRef bytes); + + void encodeDouble(double value, BytesRefBuilder bytesRefBuilder); + + double decodeDouble(BytesRef bytes); + + void encodeBoolean(boolean value, BytesRefBuilder bytesRefBuilder); + + boolean decodeBoolean(BytesRef bytes); + + int encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder); + + BytesRef decodeBytesRef(BytesRef bytes, BytesRef scratch); + + /** + * Get a version of this encoder that encodes values such that sorting + * the encoded bytes sorts by the values. + */ + TopNEncoder toSortable(); + + /** + * Get a version of this encoder that encodes values as fast as possible + * without making the encoded bytes sortable. 
+ */ + TopNEncoder toUnsortable(); +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java new file mode 100644 index 0000000000000..acc4e90de6339 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -0,0 +1,440 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.PriorityQueue; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.Operator; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.BitSet; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +/** + * An operator that sorts "rows" of values by encoding the values to sort on, as bytes (using BytesRef). Each data type is encoded + * in a specific way, defined by methods of a TopNEncoder. All the values used to sort a specific row (think of column/block 3 + * and column/block 6) are converted/encoded in a byte array and the concatenated bytes are all compared in bulk. + * For now, the only values that have a special "treatment" when it comes to encoding are the text-based ones (text, keyword, ip, version). 
+ * For each "special" encoding there should be a new TopNEncoder implementation. See {@link TopNEncoder#UTF8} for + * encoding regular "text" and "keyword" data types. See LocalExecutionPlanner for which data type uses which encoder. + * + * This Operator will not be able to sort binary values (encoded as BytesRef) because the bytes used as separator and "null"s can appear + * as valid bytes inside a binary value. + */ +public class TopNOperator implements Operator, Accountable { + private static final byte SMALL_NULL = 0x01; // "null" representation for "nulls first" + private static final byte BIG_NULL = 0x02; // "null" representation for "nulls last" + + /** + * Internal row to be used in the PriorityQueue instead of the full blown Page. + * It mirrors somehow the Block build in the sense that it keeps around an array of offsets and a count of values (to account for + * multivalues) to reference each position in each block of the Page. + */ + static final class Row implements Accountable { + private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(Row.class) + 2 * (RamUsageEstimator + .shallowSizeOfInstance(BytesRefBuilder.class) + RamUsageEstimator.shallowSizeOfInstance(BytesRef.class)) + RamUsageEstimator + .shallowSizeOfInstance(BitSet.class); + + final BytesRefBuilder keys = new BytesRefBuilder(); // BytesRef used to sort rows between each other + /** + * A true/false value (bit set/unset) for each byte in the BytesRef above corresponding to an asc/desc ordering. + * For ex, if a Long is represented as 8 bytes, each of these bytes will have the same value (set/unset) if the respective Long + * value is used for sorting ascending/descending. 
+ */ + final BitSet orderByCompositeKeyAscending = new BitSet(); + + final BytesRefBuilder values = new BytesRefBuilder(); + + @Override + public long ramBytesUsed() { + return SHALLOW_SIZE + RamUsageEstimator.sizeOf(keys.bytes()) + orderByCompositeKeyAscending.size() / Byte.SIZE + + RamUsageEstimator.sizeOf(values.bytes()); + } + } + + record KeyFactory(KeyExtractor extractor, boolean ascending) {} + + static final class RowFactory { + private final ValueExtractor[] valueExtractors; + private final KeyFactory[] keyFactories; + + RowFactory(List elementTypes, List encoders, List sortOrders, Page page) { + valueExtractors = new ValueExtractor[page.getBlockCount()]; + for (int b = 0; b < valueExtractors.length; b++) { + valueExtractors[b] = ValueExtractor.extractorFor( + elementTypes.get(b), + encoders.get(b).toUnsortable(), + channelInKey(sortOrders, b), + page.getBlock(b) + ); + } + keyFactories = new KeyFactory[sortOrders.size()]; + for (int k = 0; k < keyFactories.length; k++) { + SortOrder so = sortOrders.get(k); + KeyExtractor extractor = KeyExtractor.extractorFor( + elementTypes.get(so.channel), + encoders.get(so.channel).toSortable(), + so.asc, + so.nul(), + so.nonNul(), + page.getBlock(so.channel) + ); + keyFactories[k] = new KeyFactory(extractor, so.asc); + } + } + + Row row(int position, Row spare) { + Row result; + if (spare == null) { + result = new Row(); + } else { + result = spare; + result.keys.clear(); + result.orderByCompositeKeyAscending.clear(); + result.values.clear(); + } + + writeKey(position, result); + writeValues(position, result.values); + + return result; + } + + private void writeKey(int position, Row row) { + int orderByCompositeKeyCurrentPosition = 0; + for (KeyFactory factory : keyFactories) { + int valueAsBytesSize = factory.extractor.writeKey(row.keys, position); + row.orderByCompositeKeyAscending.set( + orderByCompositeKeyCurrentPosition, + valueAsBytesSize + orderByCompositeKeyCurrentPosition, + factory.ascending + ); + 
orderByCompositeKeyCurrentPosition += valueAsBytesSize; + } + } + + private void writeValues(int position, BytesRefBuilder values) { + for (ValueExtractor e : valueExtractors) { + e.writeValue(values, position); + } + } + } + + public record SortOrder(int channel, boolean asc, boolean nullsFirst) { + + private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(SortOrder.class); + + @Override + public String toString() { + return "SortOrder[channel=" + this.channel + ", asc=" + this.asc + ", nullsFirst=" + this.nullsFirst + "]"; + } + + byte nul() { + if (nullsFirst) { + return asc ? SMALL_NULL : BIG_NULL; + } else { + return asc ? BIG_NULL : SMALL_NULL; + } + } + + byte nonNul() { + if (nullsFirst) { + return asc ? BIG_NULL : SMALL_NULL; + } else { + return asc ? SMALL_NULL : BIG_NULL; + } + } + } + + public record TopNOperatorFactory( + int topCount, + List elementTypes, + List encoders, + List sortOrders, + int maxPageSize + ) implements OperatorFactory { + public TopNOperatorFactory + + { + for (ElementType e : elementTypes) { + if (e == null) { + throw new IllegalArgumentException("ElementType not known"); + } + } + } + + @Override + public TopNOperator get(DriverContext driverContext) { + return new TopNOperator(topCount, elementTypes, encoders, sortOrders, maxPageSize); + } + + @Override + public String describe() { + return "TopNOperator[count=" + + topCount + + ", elementTypes=" + + elementTypes + + ", encoders=" + + encoders + + ", sortOrders=" + + sortOrders + + "]"; + } + } + + private final Queue inputQueue; + + private final int maxPageSize; + + private final List elementTypes; + private final List encoders; + private final List sortOrders; + + private Iterator output; + + public TopNOperator( + int topCount, + List elementTypes, + List encoders, + List sortOrders, + int maxPageSize + ) { + this.maxPageSize = maxPageSize; + this.elementTypes = elementTypes; + this.encoders = encoders; + this.sortOrders = sortOrders; + 
this.inputQueue = new Queue(topCount); + } + + static int compareRows(Row r1, Row r2) { + // This is similar to r1.key.compareTo(r2.key) but stopping somewhere in the middle so that + // we check the byte that mismatched + BytesRef br1 = r1.keys.get(); + BytesRef br2 = r2.keys.get(); + int mismatchedByteIndex = Arrays.mismatch( + br1.bytes, + br1.offset, + br1.offset + br1.length, + br2.bytes, + br2.offset, + br2.offset + br2.length + ); + if (mismatchedByteIndex < 0) { + // the two rows are equal + return 0; + } + int length = Math.min(br1.length, br2.length); + // one value is the prefix of the other + if (mismatchedByteIndex == length) { + // the value with the greater length is considered greater than the other + if (length == br1.length) {// first row is less than the second row + return r2.orderByCompositeKeyAscending.get(length) ? 1 : -1; + } else {// second row is less than the first row + return r1.orderByCompositeKeyAscending.get(length) ? -1 : 1; + } + } else { + // compare the byte that mismatched accounting for that respective byte asc/desc ordering + int c = Byte.compareUnsigned( + r1.keys.bytes()[br1.offset + mismatchedByteIndex], + r2.keys.bytes()[br2.offset + mismatchedByteIndex] + ); + return r1.orderByCompositeKeyAscending.get(mismatchedByteIndex) ? 
-c : c; + } + } + + @Override + public boolean needsInput() { + return output == null; + } + + @Override + public void addInput(Page page) { + RowFactory rowFactory = new RowFactory(elementTypes, encoders, sortOrders, page); + + Row removed = null; + for (int i = 0; i < page.getPositionCount(); i++) { + Row x = rowFactory.row(i, removed); + removed = inputQueue.insertWithOverflow(x); + } + } + + @Override + public void finish() { + if (output == null) { + output = toPages(); + } + } + + private Iterator toPages() { + if (inputQueue.size() == 0) { + return Collections.emptyIterator(); + } + List list = new ArrayList<>(inputQueue.size()); + while (inputQueue.size() > 0) { + list.add(inputQueue.pop()); + } + Collections.reverse(list); + + List result = new ArrayList<>(); + ResultBuilder[] builders = null; + int p = 0; + int size = 0; + for (int i = 0; i < list.size(); i++) { + if (builders == null) { + size = Math.min(maxPageSize, list.size() - i); + builders = new ResultBuilder[elementTypes.size()]; + for (int b = 0; b < builders.length; b++) { + builders[b] = ResultBuilder.resultBuilderFor( + elementTypes.get(b), + encoders.get(b).toUnsortable(), + channelInKey(sortOrders, b), + size + ); + } + p = 0; + } + + Row row = list.get(i); + BytesRef keys = row.keys.get(); + for (SortOrder so : sortOrders) { + if (keys.bytes[keys.offset] == so.nul()) { + keys.offset++; + keys.length--; + continue; + } + keys.offset++; + keys.length--; + builders[so.channel].decodeKey(keys); + } + if (keys.length != 0) { + throw new IllegalArgumentException("didn't read all keys"); + } + + BytesRef values = row.values.get(); + for (ResultBuilder builder : builders) { + builder.decodeValue(values); + } + if (values.length != 0) { + throw new IllegalArgumentException("didn't read all values"); + } + + p++; + if (p == size) { + result.add(new Page(Arrays.stream(builders).map(ResultBuilder::build).toArray(Block[]::new))); + builders = null; + } + } + assert builders == null; + return 
result.iterator(); + } + + private static boolean channelInKey(List sortOrders, int channel) { + for (SortOrder so : sortOrders) { + if (so.channel == channel) { + return true; + } + } + return false; + } + + @Override + public boolean isFinished() { + return output != null && output.hasNext() == false; + } + + @Override + public Page getOutput() { + if (output != null && output.hasNext()) { + return output.next(); + } + return null; + } + + @Override + public void close() { + + } + + private static long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(TopNOperator.class) + RamUsageEstimator + .shallowSizeOfInstance(List.class) * 3; + + @Override + public long ramBytesUsed() { + // NOTE: this is ignoring the output iterator for now. Pages are not Accountable. Yet. + long arrHeader = RamUsageEstimator.NUM_BYTES_ARRAY_HEADER; + long ref = RamUsageEstimator.NUM_BYTES_OBJECT_REF; + long size = SHALLOW_SIZE; + // These lists may slightly under-count, but it's not likely to be by much. + size += RamUsageEstimator.alignObjectSize(arrHeader + ref * elementTypes.size()); + size += RamUsageEstimator.alignObjectSize(arrHeader + ref * encoders.size()); + size += RamUsageEstimator.alignObjectSize(arrHeader + ref * sortOrders.size()); + size += sortOrders.size() * SortOrder.SHALLOW_SIZE; + size += inputQueue.ramBytesUsed(); + return size; + } + + @Override + public Status status() { + return new TopNOperatorStatus(inputQueue.size(), ramBytesUsed()); + } + + @Override + public String toString() { + return "TopNOperator[count=" + + inputQueue + + ", elementTypes=" + + elementTypes + + ", encoders=" + + encoders + + ", sortOrders=" + + sortOrders + + "]"; + } + + private static class Queue extends PriorityQueue implements Accountable { + private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(Queue.class); + private final int maxSize; + + Queue(int maxSize) { + super(maxSize); + this.maxSize = maxSize; + } + + @Override + protected boolean 
lessThan(Row r1, Row r2) { + return compareRows(r1, r2) < 0; + } + + @Override + public String toString() { + return size() + "/" + maxSize; + } + + @Override + public long ramBytesUsed() { + long total = SHALLOW_SIZE; + total += RamUsageEstimator.alignObjectSize( + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + RamUsageEstimator.NUM_BYTES_OBJECT_REF * (maxSize + 1) + ); + for (Row r : this) { + total += r == null ? 0 : r.ramBytesUsed(); + } + return total; + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatus.java new file mode 100644 index 0000000000000..1261332ea1423 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatus.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class TopNOperatorStatus implements Operator.Status { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Operator.Status.class, + "topn", + TopNOperatorStatus::new + ); + private final int occupiedRows; + private final long ramBytesUsed; + + public TopNOperatorStatus(int occupiedRows, long ramBytesUsed) { + this.occupiedRows = occupiedRows; + this.ramBytesUsed = ramBytesUsed; + } + + TopNOperatorStatus(StreamInput in) throws IOException { + this.occupiedRows = in.readVInt(); + this.ramBytesUsed = in.readVLong(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(occupiedRows); + out.writeVLong(ramBytesUsed); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + public int occupiedRows() { + return occupiedRows; + } + + public long ramBytesUsed() { + return ramBytesUsed; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("occupied_rows", occupiedRows); + builder.field("ram_bytes_used", ramBytesUsed); + builder.field("ram_used", ByteSizeValue.ofBytes(ramBytesUsed)); + return builder.endObject(); + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + TopNOperatorStatus that = (TopNOperatorStatus) o; + return occupiedRows == that.occupiedRows && ramBytesUsed == that.ramBytesUsed; + } + + @Override + public int hashCode() { + 
return Objects.hash(occupiedRows, ramBytesUsed); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/UTF8TopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/UTF8TopNEncoder.java new file mode 100644 index 0000000000000..3692e3009dd45 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/UTF8TopNEncoder.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; + +import java.util.Arrays; + +/** + * Encodes utf-8 strings as {@code nul} terminated strings. + *

+ * Utf-8 can contain {@code nul} aka {@code 0x00} so we wouldn't be able + to use that as a terminator. But we fix this by adding {@code 1} to all + values less than the continuation byte. This removes some of the + self-synchronizing nature of utf-8, but we don't need that here. When + we decode we undo our munging so all consumers just get normal utf-8. + *

+ */ +final class UTF8TopNEncoder extends SortableTopNEncoder { + + private static final int CONTINUATION_BYTE = 0b1000_0000; + static final byte TERMINATOR = 0x00; + + @Override + public int encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder) { + // add one bit to every byte so that there are no "0" bytes in the provided bytes. The only "0" bytes are + // those defined as separators + int end = value.offset + value.length; + for (int i = value.offset; i < end; i++) { + byte b = value.bytes[i]; + if ((b & CONTINUATION_BYTE) == 0) { + b++; + } + bytesRefBuilder.append(b); + } + bytesRefBuilder.append(TERMINATOR); + return value.length + 1; + } + + @Override + public BytesRef decodeBytesRef(BytesRef bytes, BytesRef scratch) { + scratch.bytes = bytes.bytes; + scratch.offset = bytes.offset; + scratch.length = 0; + int i = bytes.offset; + decode: while (true) { + int leadByte = bytes.bytes[i] & 0xff; + int numBytes = utf8CodeLength[leadByte]; + switch (numBytes) { + case 0: + break decode; + case 1: + bytes.bytes[i]--; + i++; + break; + case 2: + i += 2; + break; + case 3: + i += 3; + break; + case 4: + i += 4; + break; + default: + throw new IllegalArgumentException("Invalid UTF8 header byte: 0x" + Integer.toHexString(leadByte)); + } + } + scratch.length = i - bytes.offset; + bytes.offset = i + 1; + bytes.length -= scratch.length + 1; + return scratch; + } + + @Override + public TopNEncoder toSortable() { + return this; + } + + @Override + public TopNEncoder toUnsortable() { + return this; + } + + @Override + public String toString() { + return "UTF8TopNEncoder"; + } + + // This section very inspired by Lucene's UnicodeUtil + static final int[] utf8CodeLength; + + static { + int v = Integer.MIN_VALUE; + + utf8CodeLength = Arrays.stream( + new int[][] { + // The next line differs from UnicodeUtil - the first entry is 0 because that's our terminator + { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }, + { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }, + { 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }, + { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }, + { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }, + { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }, + { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }, + { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }, + // The next line differs from UnicodeUtil - the first entry is 1 because it's valid in our encoding. + { 1, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v }, + { v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v }, + { v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v }, + { v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v }, + { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }, + { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }, + { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 }, + { 4, 4, 4, 4, 4, 4, 4, 4 /* , 5, 5, 5, 5, 6, 6, 0, 0 */ } } + ).flatMapToInt(Arrays::stream).toArray(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractor.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractor.java new file mode 100644 index 0000000000000..2369078643ae4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractor.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DocBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; + +/** + * Extracts values into a {@link BytesRefBuilder}. + */ +interface ValueExtractor { + void writeValue(BytesRefBuilder values, int position); + + static ValueExtractor extractorFor(ElementType elementType, TopNEncoder encoder, boolean inKey, Block block) { + if (false == (elementType == block.elementType() || ElementType.NULL == block.elementType())) { + throw new IllegalArgumentException("Expected [" + elementType + "] but was [" + block.elementType() + "]"); + } + return switch (block.elementType()) { + case BOOLEAN -> ValueExtractorForBoolean.extractorFor(encoder, inKey, (BooleanBlock) block); + case BYTES_REF -> ValueExtractorForBytesRef.extractorFor(encoder, inKey, (BytesRefBlock) block); + case INT -> ValueExtractorForInt.extractorFor(encoder, inKey, (IntBlock) block); + case LONG -> ValueExtractorForLong.extractorFor(encoder, inKey, (LongBlock) block); + case DOUBLE -> ValueExtractorForDouble.extractorFor(encoder, inKey, (DoubleBlock) block); + case NULL -> new ValueExtractorForNull(); + case DOC -> new ValueExtractorForDoc(encoder, ((DocBlock) block).asVector()); + default -> { + assert false : "No value extractor for [" + block.elementType() + "]"; + throw new UnsupportedOperationException("No value extractor for [" + block.elementType() + "]"); + } + }; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForDoc.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForDoc.java new file mode 100644 index 0000000000000..733b9cd4ab708 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForDoc.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.DocVector; + +class ValueExtractorForDoc implements ValueExtractor { + private final DocVector vector; + + ValueExtractorForDoc(TopNEncoder encoder, DocVector vector) { + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE; + this.vector = vector; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeInt(vector.shards().getInt(position), values); + TopNEncoder.DEFAULT_UNSORTABLE.encodeInt(vector.segments().getInt(position), values); + TopNEncoder.DEFAULT_UNSORTABLE.encodeInt(vector.docs().getInt(position), values); + } + + @Override + public String toString() { + return "ValueExtractorForDoc"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForNull.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForNull.java new file mode 100644 index 0000000000000..967ede1b3f46d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForNull.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRefBuilder; + +class ValueExtractorForNull implements ValueExtractor { + @Override + public void writeValue(BytesRefBuilder values, int position) { + /* + * Write 0 values which can be read by *any* result builder and will always + * make a null value. + */ + TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(0, values); + } + + @Override + public String toString() { + return "ValueExtractorForNull"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/VersionTopNEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/VersionTopNEncoder.java new file mode 100644 index 0000000000000..b6ce97586c449 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/VersionTopNEncoder.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; + +class VersionTopNEncoder extends SortableTopNEncoder { + @Override + public int encodeBytesRef(BytesRef value, BytesRefBuilder bytesRefBuilder) { + // TODO versions can contain nul so we need to delegate to the utf-8 encoder for the utf-8 parts of a version + for (int i = value.offset; i < value.offset + value.length; i++) { + if (value.bytes[i] == UTF8TopNEncoder.TERMINATOR) { + throw new IllegalArgumentException("Can't sort versions containing nul"); + } + } + bytesRefBuilder.append(value); + bytesRefBuilder.append(UTF8TopNEncoder.TERMINATOR); + return value.length + 1; + } + + @Override + public BytesRef decodeBytesRef(BytesRef bytes, BytesRef scratch) { + int i = bytes.offset; + while (bytes.bytes[i] != UTF8TopNEncoder.TERMINATOR) { + i++; + } + scratch.bytes = bytes.bytes; + scratch.offset = bytes.offset; + scratch.length = i - bytes.offset; + bytes.offset += scratch.length + 1; + bytes.length -= scratch.length + 1; + return scratch; + } + + @Override + public String toString() { + return "VersionTopNEncoder"; + } + + @Override + public TopNEncoder toSortable() { + return this; + } + + @Override + public TopNEncoder toUnsortable() { + return this; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st new file mode 100644 index 0000000000000..28f452ccac1a7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st @@ -0,0 +1,224 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.compute.operator.topn; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +$endif$ +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.$Type$Vector; + +abstract class KeyExtractorFor$Type$ implements KeyExtractor { + static KeyExtractorFor$Type$ extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, $Type$Block block) { + $Type$Vector v = block.asVector(); + if (v != null) { + return new KeyExtractorFor$Type$.ForVector(encoder, nul, nonNul, v); + } + if (ascending) { + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorFor$Type$.MinForAscending(encoder, nul, nonNul, block) + : new KeyExtractorFor$Type$.MinForUnordered(encoder, nul, nonNul, block); + } + return block.mvOrdering() == Block.MvOrdering.ASCENDING + ? new KeyExtractorFor$Type$.MaxForAscending(encoder, nul, nonNul, block) + : new KeyExtractorFor$Type$.MaxForUnordered(encoder, nul, nonNul, block); + } + +$if(BytesRef)$ + private final TopNEncoder encoder; + protected final BytesRef scratch = new BytesRef(); +$endif$ + private final byte nul; + private final byte nonNul; + + KeyExtractorFor$Type$(TopNEncoder encoder, byte nul, byte nonNul) { +$if(BytesRef)$ + this.encoder = encoder; +$else$ + assert encoder == TopNEncoder.DEFAULT_SORTABLE; +$endif$ + this.nul = nul; + this.nonNul = nonNul; + } + + protected final int nonNul(BytesRefBuilder key, $type$ value) { + key.append(nonNul); +$if(BytesRef)$ + return encoder.encodeBytesRef(value, key) + 1; +$elseif(boolean)$ + TopNEncoder.DEFAULT_SORTABLE.encodeBoolean(value, key); + return Byte.BYTES + 1; +$else$ + TopNEncoder.DEFAULT_SORTABLE.encode$Type$(value, key); + return $BYTES$ + 1; +$endif$ + } + + protected final int nul(BytesRefBuilder key) { + key.append(nul); + return 1; + } + + static class ForVector extends KeyExtractorFor$Type$ { + 
private final $Type$Vector vector; + + ForVector(TopNEncoder encoder, byte nul, byte nonNul, $Type$Vector vector) { + super(encoder, nul, nonNul); + this.vector = vector; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { +$if(BytesRef)$ + return nonNul(key, vector.get$Type$(position, scratch)); +$else$ + return nonNul(key, vector.get$Type$(position)); +$endif$ + } + } + + static class MinForAscending extends KeyExtractorFor$Type$ { + private final $Type$Block block; + + MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } +$if(BytesRef)$ + return nonNul(key, block.get$Type$(block.getFirstValueIndex(position), scratch)); +$else$ + return nonNul(key, block.get$Type$(block.getFirstValueIndex(position))); +$endif$ + } + } + + static class MaxForAscending extends KeyExtractorFor$Type$ { + private final $Type$Block block; + + MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + if (block.isNull(position)) { + return nul(key); + } +$if(BytesRef)$ + return nonNul(key, block.get$Type$(block.getFirstValueIndex(position) + block.getValueCount(position) - 1, scratch)); +$else$ + return nonNul(key, block.get$Type$(block.getFirstValueIndex(position) + block.getValueCount(position) - 1)); +$endif$ + } + } + + static class MinForUnordered extends KeyExtractorFor$Type$ { + private final $Type$Block block; + +$if(BytesRef)$ + private final BytesRef minScratch = new BytesRef(); +$endif$ + + MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int 
position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; +$if(BytesRef)$ + BytesRef min = block.getBytesRef(start, minScratch); + for (int i = start; i < end; i++) { + BytesRef v = block.getBytesRef(i, scratch); + if (v.compareTo(min) < 0) { + min.bytes = v.bytes; + min.offset = v.offset; + min.length = v.length; + } + } + return nonNul(key, min); +$elseif(boolean)$ + for (int i = start; i < end; i++) { + if (block.getBoolean(i) == false) { + return nonNul(key, false); + } + } + return nonNul(key, true); +$else$ + $type$ min = block.get$Type$(start); + for (int i = start + 1; i < end; i++) { + min = Math.min(min, block.get$Type$(i)); + } + return nonNul(key, min); +$endif$ + } + } + + static class MaxForUnordered extends KeyExtractorFor$Type$ { + private final $Type$Block block; + +$if(BytesRef)$ + private final BytesRef maxScratch = new BytesRef(); +$endif$ + + MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { + super(encoder, nul, nonNul); + this.block = block; + } + + @Override + public int writeKey(BytesRefBuilder key, int position) { + int size = block.getValueCount(position); + if (size == 0) { + return nul(key); + } + int start = block.getFirstValueIndex(position); + int end = start + size; +$if(BytesRef)$ + BytesRef max = block.getBytesRef(start, maxScratch); + for (int i = start; i < end; i++) { + BytesRef v = block.getBytesRef(i, scratch); + if (v.compareTo(max) > 0) { + max.bytes = v.bytes; + max.offset = v.offset; + max.length = v.length; + } + } + return nonNul(key, max); +$elseif(boolean)$ + for (int i = start; i < end; i++) { + if (block.getBoolean(i)) { + return nonNul(key, true); + } + } + return nonNul(key, false); +$else$ + $type$ max = block.get$Type$(start); + for (int i = start + 1; i < end; i++) { + max = Math.max(max, block.get$Type$(i)); + } + return nonNul(key, max); +$endif$ + } + } +} diff 
--git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st new file mode 100644 index 0000000000000..5f9a35bd0ebd3 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.$Type$Block; + +class ResultBuilderFor$Type$ implements ResultBuilder { + private final $Type$Block.Builder builder; + + private final boolean inKey; + +$if(BytesRef)$ + private final TopNEncoder encoder; + + private final BytesRef scratch = new BytesRef(); +$endif$ + + /** + * The value previously set by {@link #decodeKey}. + */ + private $type$ key; + + ResultBuilderFor$Type$(TopNEncoder encoder, boolean inKey, int initialSize) { +$if(BytesRef)$ + this.encoder = encoder; +$else$ + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); +$endif$ + this.inKey = inKey; + this.builder = $Type$Block.newBlockBuilder(initialSize); + } + + @Override + public void decodeKey(BytesRef keys) { + assert inKey; +$if(BytesRef)$ + key = encoder.toSortable().decodeBytesRef(keys, scratch); +$else$ + key = TopNEncoder.DEFAULT_SORTABLE.decode$Type$(keys); +$endif$ + } + + @Override + public void decodeValue(BytesRef values) { + int count = TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(values); + switch (count) { + case 0 -> { + builder.appendNull(); + } + case 1 -> builder.append$Type$(inKey ? 
key : readValueFromValues(values)); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < count; i++) { + builder.append$Type$(readValueFromValues(values)); + } + builder.endPositionEntry(); + } + } + } + + private $type$ readValueFromValues(BytesRef values) { +$if(BytesRef)$ + return encoder.toUnsortable().decodeBytesRef(values, scratch); +$else$ + return TopNEncoder.DEFAULT_UNSORTABLE.decode$Type$(values); +$endif$ + } + + @Override + public $Type$Block build() { + return builder.build(); + } + + @Override + public String toString() { + return "ResultBuilderFor$Type$[inKey=" + inKey + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ValueExtractor.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ValueExtractor.java.st new file mode 100644 index 0000000000000..3a8792387b142 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ValueExtractor.java.st @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +$endif$ +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.$Type$Vector; + +abstract class ValueExtractorFor$Type$ implements ValueExtractor { + static ValueExtractorFor$Type$ extractorFor(TopNEncoder encoder, boolean inKey, $Type$Block block) { + $Type$Vector vector = block.asVector(); + if (vector != null) { + return new ValueExtractorFor$Type$.ForVector(encoder, inKey, vector); + } + return new ValueExtractorFor$Type$.ForBlock(encoder, inKey, block); + } + +$if(BytesRef)$ + private final TopNEncoder encoder; + + protected final BytesRef scratch = new BytesRef(); +$endif$ + + protected final boolean inKey; + + ValueExtractorFor$Type$(TopNEncoder encoder, boolean inKey) { +$if(BytesRef)$ + this.encoder = encoder; +$else$ + assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); +$endif$ + this.inKey = inKey; + } + + protected final void writeCount(BytesRefBuilder values, int count) { + TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(count, values); + } + + protected final void actualWriteValue(BytesRefBuilder values, $type$ value) { +$if(BytesRef)$ + encoder.encodeBytesRef(value, values); +$else$ + TopNEncoder.DEFAULT_UNSORTABLE.encode$Type$(value, values); +$endif$ + } + + static class ForVector extends ValueExtractorFor$Type$ { + private final $Type$Vector vector; + + ForVector(TopNEncoder encoder, boolean inKey, $Type$Vector vector) { + super(encoder, inKey); + this.vector = vector; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + writeCount(values, 1); + if (inKey) { + // will read results from the key + return; + } +$if(BytesRef)$ + actualWriteValue(values, vector.get$Type$(position, scratch)); +$else$ + actualWriteValue(values, vector.get$Type$(position)); +$endif$ + } + } + + static class ForBlock extends 
ValueExtractorFor$Type$ { + private final $Type$Block block; + + ForBlock(TopNEncoder encoder, boolean inKey, $Type$Block block) { + super(encoder, inKey); + this.block = block; + } + + @Override + public void writeValue(BytesRefBuilder values, int position) { + int size = block.getValueCount(position); + writeCount(values, size); + if (size == 1 && inKey) { + // Will read results from the key + return; + } + int start = block.getFirstValueIndex(position); + int end = start + size; + for (int i = start; i < end; i++) { +$if(BytesRef)$ + actualWriteValue(values, block.getBytesRef(i, scratch)); +$else$ + actualWriteValue(values, block.get$Type$(i)); +$endif$ + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/DefaultUnsortableTopNEncoderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/DefaultUnsortableTopNEncoderTests.java new file mode 100644 index 0000000000000..9f46399546a09 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/DefaultUnsortableTopNEncoderTests.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class DefaultUnsortableTopNEncoderTests extends ESTestCase { + public void testVIntSmall() { + testVInt(between(0, 127), 1); + } + + public void testVIntMed() { + testVInt(between(128, 16383), 2); + } + + public void testVIntBig() { + testVInt(between(16384, 2097151), 3); + } + + public void testVIntBigger() { + testVInt(between(2097152, 268435455), 4); + } + + public void testVIntBiggest() { + testVInt(between(268435456, Integer.MAX_VALUE), 5); + } + + public void testVIntNegative() { + testVInt(between(Integer.MIN_VALUE, -1), 5); + } + + private void testVInt(int v, int expectedBytes) { + BytesRefBuilder builder = new BytesRefBuilder(); + TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(v, builder); + assertThat(builder.length(), equalTo(expectedBytes)); + BytesRef bytes = builder.toBytesRef(); + assertThat(TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(bytes), equalTo(v)); + assertThat(bytes.length, equalTo(0)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java new file mode 100644 index 0000000000000..5fbb44f1fac0b --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java @@ -0,0 +1,177 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockTestUtils; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +public class ExtractorTests extends ESTestCase { + @ParametersFactory + public static Iterable parameters() { + List cases = new ArrayList<>(); + for (ElementType e : ElementType.values()) { + switch (e) { + case UNKNOWN -> { + } + case BYTES_REF -> { + cases.add(valueTestCase("single alpha", e, TopNEncoder.UTF8, () -> randomAlphaOfLength(5))); + cases.add(valueTestCase("many alpha", e, TopNEncoder.UTF8, () -> randomList(2, 10, () -> randomAlphaOfLength(5)))); + cases.add(valueTestCase("single utf8", e, TopNEncoder.UTF8, () -> randomRealisticUnicodeOfLength(10))); + cases.add( + valueTestCase("many utf8", e, TopNEncoder.UTF8, () -> randomList(2, 10, () -> randomRealisticUnicodeOfLength(10))) + ); + cases.add(valueTestCase("single version", e, TopNEncoder.VERSION, () -> TopNEncoderTests.randomVersion().toBytesRef())); + cases.add( + valueTestCase( + "many version", + e, + TopNEncoder.VERSION, + () -> randomList(2, 10, () -> TopNEncoderTests.randomVersion().toBytesRef()) + ) + ); + cases.add( + valueTestCase( + "single IP", + e, + TopNEncoder.IP, + () -> new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))) + ) + ); + cases.add( + 
valueTestCase( + "many IP", + e, + TopNEncoder.IP, + () -> randomList(2, 10, () -> new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean())))) + ) + ); + } + case DOC -> cases.add( + new Object[] { + new TestCase( + "doc", + e, + TopNEncoder.DEFAULT_UNSORTABLE, + () -> new DocVector( + IntBlock.newConstantBlockWith(randomInt(), 1).asVector(), + IntBlock.newConstantBlockWith(randomInt(), 1).asVector(), + IntBlock.newConstantBlockWith(randomInt(), 1).asVector(), + randomBoolean() ? null : randomBoolean() + ).asBlock() + ) } + ); + case NULL -> cases.add(valueTestCase("null", e, TopNEncoder.DEFAULT_UNSORTABLE, () -> null)); + default -> { + cases.add(valueTestCase("single " + e, e, TopNEncoder.DEFAULT_UNSORTABLE, () -> BlockTestUtils.randomValue(e))); + cases.add( + valueTestCase( + "many " + e, + e, + TopNEncoder.DEFAULT_UNSORTABLE, + () -> randomList(2, 10, () -> BlockTestUtils.randomValue(e)) + ) + ); + } + } + } + return cases; + } + + static Object[] valueTestCase(String name, ElementType type, TopNEncoder encoder, Supplier value) { + return new Object[] { new TestCase(name, type, encoder, () -> BlockUtils.fromListRow(Arrays.asList(value.get()))[0]) }; + } + + static class TestCase { + private final String name; + private final ElementType type; + private final TopNEncoder encoder; + private final Supplier value; + + TestCase(String name, ElementType type, TopNEncoder encoder, Supplier value) { + this.name = name; + this.type = type; + this.encoder = encoder; + this.value = value; + } + + @Override + public String toString() { + return name; + } + } + + private final TestCase testCase; + + public ExtractorTests(TestCase testCase) { + this.testCase = testCase; + } + + public void testNotInKey() { + Block value = testCase.value.get(); + + BytesRefBuilder valuesBuilder = new BytesRefBuilder(); + ValueExtractor.extractorFor(testCase.type, testCase.encoder.toUnsortable(), false, value).writeValue(valuesBuilder, 0); + assertThat(valuesBuilder.length(), 
greaterThan(0)); + + ResultBuilder result = ResultBuilder.resultBuilderFor(testCase.type, testCase.encoder.toUnsortable(), false, 1); + BytesRef values = valuesBuilder.get(); + result.decodeValue(values); + assertThat(values.length, equalTo(0)); + + assertThat(result.build(), equalTo(value)); + } + + public void testInKey() { + assumeFalse("can't sort on _doc", testCase.type == ElementType.DOC); + Block value = testCase.value.get(); + + BytesRefBuilder keysBuilder = new BytesRefBuilder(); + KeyExtractor.extractorFor(testCase.type, testCase.encoder.toSortable(), randomBoolean(), randomByte(), randomByte(), value) + .writeKey(keysBuilder, 0); + assertThat(keysBuilder.length(), greaterThan(0)); + + BytesRefBuilder valuesBuilder = new BytesRefBuilder(); + ValueExtractor.extractorFor(testCase.type, testCase.encoder.toUnsortable(), true, value).writeValue(valuesBuilder, 0); + assertThat(valuesBuilder.length(), greaterThan(0)); + + ResultBuilder result = ResultBuilder.resultBuilderFor(testCase.type, testCase.encoder.toUnsortable(), true, 1); + BytesRef keys = keysBuilder.get(); + if (testCase.type == ElementType.NULL) { + assertThat(keys.length, equalTo(1)); + } else { + // Skip the non-null byte + keys.offset++; + keys.length--; + result.decodeKey(keys); + assertThat(keys.length, equalTo(0)); + } + BytesRef values = valuesBuilder.get(); + result.decodeValue(values); + assertThat(values.length, equalTo(0)); + + assertThat(result.build(), equalTo(value)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNEncoderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNEncoderTests.java new file mode 100644 index 0000000000000..5f1dbc887a1d7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNEncoderTests.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.versionfield.Version; + +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class TopNEncoderTests extends ESTestCase { + @ParametersFactory + public static Iterable parameters() { + return List.of( + new Object[] { TopNEncoder.DEFAULT_SORTABLE }, + new Object[] { TopNEncoder.UTF8 }, + new Object[] { TopNEncoder.VERSION }, + new Object[] { TopNEncoder.IP }, + new Object[] { TopNEncoder.DEFAULT_UNSORTABLE } + ); + } + + private final TopNEncoder encoder; + + public TopNEncoderTests(TopNEncoder encoder) { + this.encoder = encoder; + } + + public void testLong() { + BytesRefBuilder builder = new BytesRefBuilder(); + long v = randomLong(); + encoder.encodeLong(v, builder); + BytesRef encoded = builder.toBytesRef(); + assertThat(encoder.decodeLong(encoded), equalTo(v)); + assertThat(encoded.length, equalTo(0)); + } + + public void testInt() { + BytesRefBuilder builder = new BytesRefBuilder(); + int v = randomInt(); + encoder.encodeInt(v, builder); + BytesRef encoded = builder.toBytesRef(); + assertThat(encoder.decodeInt(encoded), equalTo(v)); + assertThat(encoded.length, equalTo(0)); + } + + public void testDouble() { + BytesRefBuilder builder = new BytesRefBuilder(); + double v = randomDouble(); + encoder.encodeDouble(v, builder); + BytesRef encoded = builder.toBytesRef(); + assertThat(encoder.decodeDouble(encoded), equalTo(v)); + assertThat(encoded.length, equalTo(0)); + } + + public void testBoolean() { + 
BytesRefBuilder builder = new BytesRefBuilder(); + boolean v = randomBoolean(); + encoder.encodeBoolean(v, builder); + BytesRef encoded = builder.toBytesRef(); + assertThat(encoder.decodeBoolean(encoded), equalTo(v)); + assertThat(encoded.length, equalTo(0)); + } + + public void testAlpha() { + assumeTrue("unsupported", encoder == TopNEncoder.UTF8); + roundTripBytesRef(new BytesRef(randomAlphaOfLength(6))); + } + + public void testUtf8() { + assumeTrue("unsupported", encoder == TopNEncoder.UTF8); + roundTripBytesRef(new BytesRef(randomRealisticUnicodeOfLength(6))); + } + + /** + * Round trip the highest unicode character to encode without a continuation. + */ + public void testDel() { + assumeTrue("unsupported", encoder == TopNEncoder.UTF8); + roundTripBytesRef(new BytesRef("\u007F")); + } + + /** + * Round trip the lowest unicode character to encode using a continuation byte. + */ + public void testPaddingCharacter() { + assumeTrue("unsupported", encoder == TopNEncoder.UTF8); + roundTripBytesRef(new BytesRef("\u0080")); + } + + public void testVersion() { + assumeTrue("unsupported", encoder == TopNEncoder.VERSION); + roundTripBytesRef(randomVersion().toBytesRef()); + } + + public void testIp() { + assumeTrue("unsupported", encoder == TopNEncoder.IP); + roundTripBytesRef(new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean())))); + } + + private void roundTripBytesRef(BytesRef v) { + BytesRefBuilder builder = new BytesRefBuilder(); + int reportedSize = encoder.encodeBytesRef(v, builder); + BytesRef encoded = builder.toBytesRef(); + assertThat(encoded.length, equalTo(reportedSize)); + assertThat(encoder.decodeBytesRef(encoded, new BytesRef()), equalTo(v)); + assertThat(encoded.length, equalTo(0)); + } + + static Version randomVersion() { + // TODO degenerate versions and stuff + return switch (between(0, 3)) { + case 0 -> new Version(Integer.toString(between(0, 100))); + case 1 -> new Version(between(0, 100) + "." 
+ between(0, 100)); + case 2 -> new Version(between(0, 100) + "." + between(0, 100) + "." + between(0, 100)); + case 3 -> TopNOperatorTests.randomVersion(); + default -> throw new IllegalArgumentException(); + }; + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatusTests.java new file mode 100644 index 0000000000000..f52274b68bdf6 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorStatusTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.topn; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class TopNOperatorStatusTests extends AbstractWireSerializingTestCase { + public void testToXContent() { + assertThat(Strings.toString(new TopNOperatorStatus(10, 2000)), equalTo(""" + {"occupied_rows":10,"ram_bytes_used":2000,"ram_used":"1.9kb"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return TopNOperatorStatus::new; + } + + @Override + protected TopNOperatorStatus createTestInstance() { + return new TopNOperatorStatus(randomNonNegativeInt(), randomNonNegativeLong()); + } + + @Override + protected TopNOperatorStatus mutateInstance(TopNOperatorStatus instance) { + int occupiedRows = instance.occupiedRows(); + long ramBytesUsed = instance.ramBytesUsed(); + switch (between(0, 1)) { + case 0: + occupiedRows = randomValueOtherThan(occupiedRows, () -> 
randomNonNegativeInt()); + break; + case 1: + ramBytesUsed = randomValueOtherThan(ramBytesUsed, () -> randomNonNegativeLong()); + break; + default: + throw new IllegalArgumentException(); + } + return new TopNOperatorStatus(occupiedRows, ramBytesUsed); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java similarity index 68% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TopNOperatorTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 4dffd07e23acc..fee7b4d336270 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -5,9 +5,10 @@ * 2.0. 
*/ -package org.elasticsearch.compute.operator; +package org.elasticsearch.compute.operator.topn; import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.tests.util.RamUsageTester; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.unit.ByteSizeValue; @@ -15,12 +16,22 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.TestBlockBuilder; +import org.elasticsearch.compute.operator.CannedSourceOperator; +import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.OperatorTestCase; +import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.operator.TupleBlockSourceOperator; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.versionfield.Version; @@ -31,11 +42,8 @@ import java.util.Arrays; import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; @@ -53,12 +61,13 @@ import static org.elasticsearch.compute.data.ElementType.DOUBLE; import static 
org.elasticsearch.compute.data.ElementType.INT; import static org.elasticsearch.compute.data.ElementType.LONG; -import static org.elasticsearch.compute.operator.TopNOperator.BYTESREF_FIXED_LENGTH_ENCODER; -import static org.elasticsearch.compute.operator.TopNOperator.BYTESREF_UTF8_ENCODER; -import static org.elasticsearch.compute.operator.TopNOperator.DEFAULT_ENCODER; +import static org.elasticsearch.compute.operator.topn.TopNEncoder.DEFAULT_SORTABLE; +import static org.elasticsearch.compute.operator.topn.TopNEncoder.DEFAULT_UNSORTABLE; +import static org.elasticsearch.compute.operator.topn.TopNEncoder.UTF8; import static org.elasticsearch.core.Tuple.tuple; import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -66,7 +75,6 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; public class TopNOperatorTests extends OperatorTestCase { - private final int pageSize = randomPageSize(); // versions taken from org.elasticsearch.xpack.versionfield.VersionTests private static final List VERSIONS = List.of( @@ -109,18 +117,26 @@ public class TopNOperatorTests extends OperatorTestCase { ); @Override - protected Operator.OperatorFactory simple(BigArrays bigArrays) { - return new TopNOperator.TopNOperatorFactory(4, List.of(new TopNOperator.SortOrder(0, true, false)), pageSize); + protected TopNOperator.TopNOperatorFactory simple(BigArrays bigArrays) { + return new TopNOperator.TopNOperatorFactory( + 4, + List.of(LONG), + List.of(DEFAULT_UNSORTABLE), + List.of(new TopNOperator.SortOrder(0, true, false)), + pageSize + ); } @Override protected String expectedDescriptionOfSimple() { - return "TopNOperator[count = 4, sortOrders = [SortOrder[channel=0, asc=true, nullsFirst=false, " + "encoder=DefaultEncoder]]]"; + return 
"TopNOperator[count=4, elementTypes=[LONG], encoders=[DefaultUnsortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]"; } @Override protected String expectedToStringOfSimple() { - return "TopNOperator[count = 0/4, sortOrder = SortOrder[channel=0, asc=true, nullsFirst=false, " + "encoder=DefaultEncoder]]"; + return "TopNOperator[count=0/4, elementTypes=[LONG], encoders=[DefaultUnsortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]"; } @Override @@ -157,332 +173,241 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { return ByteSizeValue.ZERO; } + public void testRamBytesUsed() { + int topCount = 10_000; + // We under-count by a few bytes because of the lists. In that end that's fine, but we need to account for it here. + long underCount = 100; + TopNOperator op = new TopNOperator.TopNOperatorFactory( + topCount, + List.of(LONG), + List.of(DEFAULT_UNSORTABLE), + List.of(new TopNOperator.SortOrder(0, true, false)), + pageSize + ).get(new DriverContext()); + long actualEmpty = RamUsageTester.ramUsed(op) - RamUsageTester.ramUsed(LONG) - RamUsageTester.ramUsed(DEFAULT_UNSORTABLE); + assertThat(op.ramBytesUsed(), both(greaterThan(actualEmpty - underCount)).and(lessThan(actualEmpty))); + // But when we fill it then we're quite close + for (Page p : CannedSourceOperator.collectPages(simpleInput(topCount))) { + op.addInput(p); + } + long actualFull = RamUsageTester.ramUsed(op) - RamUsageTester.ramUsed(BYTES_REF) - RamUsageTester.ramUsed(DEFAULT_UNSORTABLE); + assertThat(op.ramBytesUsed(), both(greaterThan(actualFull - underCount)).and(lessThan(actualFull))); + } + public void testRandomTopN() { for (boolean asc : List.of(true, false)) { int limit = randomIntBetween(1, 20); List inputValues = randomList(0, 5000, ESTestCase::randomLong); Comparator comparator = asc ? 
naturalOrder() : reverseOrder(); List expectedValues = inputValues.stream().sorted(comparator).limit(limit).toList(); - List outputValues = topN(inputValues, limit, asc, false); + List outputValues = topNLong(inputValues, limit, asc, false); assertThat(outputValues, equalTo(expectedValues)); } } public void testBasicTopN() { List values = Arrays.asList(2L, 1L, 4L, null, 5L, 10L, null, 20L, 4L, 100L); - assertThat(topN(values, 1, true, false), equalTo(Arrays.asList(1L))); - assertThat(topN(values, 1, false, false), equalTo(Arrays.asList(100L))); - assertThat(topN(values, 2, true, false), equalTo(Arrays.asList(1L, 2L))); - assertThat(topN(values, 2, false, false), equalTo(Arrays.asList(100L, 20L))); - assertThat(topN(values, 3, true, false), equalTo(Arrays.asList(1L, 2L, 4L))); - assertThat(topN(values, 3, false, false), equalTo(Arrays.asList(100L, 20L, 10L))); - assertThat(topN(values, 4, true, false), equalTo(Arrays.asList(1L, 2L, 4L, 4L))); - assertThat(topN(values, 4, false, false), equalTo(Arrays.asList(100L, 20L, 10L, 5L))); - assertThat(topN(values, 100, true, false), equalTo(Arrays.asList(1L, 2L, 4L, 4L, 5L, 10L, 20L, 100L, null, null))); - assertThat(topN(values, 100, false, false), equalTo(Arrays.asList(100L, 20L, 10L, 5L, 4L, 4L, 2L, 1L, null, null))); - assertThat(topN(values, 1, true, true), equalTo(Arrays.asList(new Long[] { null }))); - assertThat(topN(values, 1, false, true), equalTo(Arrays.asList(new Long[] { null }))); - assertThat(topN(values, 2, true, true), equalTo(Arrays.asList(null, null))); - assertThat(topN(values, 2, false, true), equalTo(Arrays.asList(null, null))); - assertThat(topN(values, 3, true, true), equalTo(Arrays.asList(null, null, 1L))); - assertThat(topN(values, 3, false, true), equalTo(Arrays.asList(null, null, 100L))); - assertThat(topN(values, 4, true, true), equalTo(Arrays.asList(null, null, 1L, 2L))); - assertThat(topN(values, 4, false, true), equalTo(Arrays.asList(null, null, 100L, 20L))); - assertThat(topN(values, 100, 
true, true), equalTo(Arrays.asList(null, null, 1L, 2L, 4L, 4L, 5L, 10L, 20L, 100L))); - assertThat(topN(values, 100, false, true), equalTo(Arrays.asList(null, null, 100L, 20L, 10L, 5L, 4L, 4L, 2L, 1L))); + assertThat(topNLong(values, 1, true, false), equalTo(Arrays.asList(1L))); + assertThat(topNLong(values, 1, false, false), equalTo(Arrays.asList(100L))); + assertThat(topNLong(values, 2, true, false), equalTo(Arrays.asList(1L, 2L))); + assertThat(topNLong(values, 2, false, false), equalTo(Arrays.asList(100L, 20L))); + assertThat(topNLong(values, 3, true, false), equalTo(Arrays.asList(1L, 2L, 4L))); + assertThat(topNLong(values, 3, false, false), equalTo(Arrays.asList(100L, 20L, 10L))); + assertThat(topNLong(values, 4, true, false), equalTo(Arrays.asList(1L, 2L, 4L, 4L))); + assertThat(topNLong(values, 4, false, false), equalTo(Arrays.asList(100L, 20L, 10L, 5L))); + assertThat(topNLong(values, 100, true, false), equalTo(Arrays.asList(1L, 2L, 4L, 4L, 5L, 10L, 20L, 100L, null, null))); + assertThat(topNLong(values, 100, false, false), equalTo(Arrays.asList(100L, 20L, 10L, 5L, 4L, 4L, 2L, 1L, null, null))); + assertThat(topNLong(values, 1, true, true), equalTo(Arrays.asList(new Long[] { null }))); + assertThat(topNLong(values, 1, false, true), equalTo(Arrays.asList(new Long[] { null }))); + assertThat(topNLong(values, 2, true, true), equalTo(Arrays.asList(null, null))); + assertThat(topNLong(values, 2, false, true), equalTo(Arrays.asList(null, null))); + assertThat(topNLong(values, 3, true, true), equalTo(Arrays.asList(null, null, 1L))); + assertThat(topNLong(values, 3, false, true), equalTo(Arrays.asList(null, null, 100L))); + assertThat(topNLong(values, 4, true, true), equalTo(Arrays.asList(null, null, 1L, 2L))); + assertThat(topNLong(values, 4, false, true), equalTo(Arrays.asList(null, null, 100L, 20L))); + assertThat(topNLong(values, 100, true, true), equalTo(Arrays.asList(null, null, 1L, 2L, 4L, 4L, 5L, 10L, 20L, 100L))); + assertThat(topNLong(values, 100, false, 
true), equalTo(Arrays.asList(null, null, 100L, 20L, 10L, 5L, 4L, 4L, 2L, 1L))); } - public void testCompareInts() { - Block[] bs = new Block[] { - IntBlock.newBlockBuilder(2).appendInt(Integer.MIN_VALUE).appendInt(randomIntBetween(-1000, -1)).build(), - IntBlock.newBlockBuilder(2).appendInt(randomIntBetween(-1000, -1)).appendInt(0).build(), - IntBlock.newBlockBuilder(2).appendInt(0).appendInt(randomIntBetween(1, 1000)).build(), - IntBlock.newBlockBuilder(2).appendInt(randomIntBetween(1, 1000)).appendInt(Integer.MAX_VALUE).build(), - IntBlock.newBlockBuilder(2).appendInt(Integer.MAX_VALUE).appendInt(0).build() }; - - Page page = new Page(bs); - TopNOperator.RowFactory rowFactory = new TopNOperator.RowFactory(page); - - Block nullBlock = Block.constantNullBlock(1); - Block[] nullBs = new Block[] { nullBlock, nullBlock, nullBlock, nullBlock, nullBlock }; - Page nullPage = new Page(nullBs); - TopNOperator.RowFactory nullRowFactory = new TopNOperator.RowFactory(page); - - for (int i = 0; i < bs.length; i++) { - Tuple rows = nonBytesRefRows( - randomBoolean(), - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - null, - i - ); - assertEquals(0, TopNOperator.compareRows(rows.v1(), rows.v1())); - - rows = nonBytesRefRows( - randomBoolean(), - true, - so -> rowFactory.row(page, 0, null, so), - so -> nullRowFactory.row(nullPage, 0, null, so), - i - ); - assertEquals(-1, TopNOperator.compareRows(rows.v1(), rows.v2())); - - rows = nonBytesRefRows( - randomBoolean(), - false, - so -> rowFactory.row(page, 0, null, so), - so -> nullRowFactory.row(nullPage, 0, null, so), - i - ); - assertEquals(1, TopNOperator.compareRows(rows.v1(), rows.v2())); - - rows = nonBytesRefRows( - randomBoolean(), - true, - so -> rowFactory.row(page, 0, null, so), - so -> nullRowFactory.row(nullPage, 0, null, so), - i - ); - assertEquals(1, TopNOperator.compareRows(rows.v2(), rows.v1())); - - rows = nonBytesRefRows( - randomBoolean(), - false, - so -> rowFactory.row(page, 0, null, so), - so 
-> nullRowFactory.row(nullPage, 0, null, so), - i - ); - assertEquals(-1, TopNOperator.compareRows(rows.v2(), rows.v1())); - } - for (int i = 0; i < bs.length - 1; i++) { - Tuple rows = nonBytesRefRows( - true, - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - i - ); - assertThat(TopNOperator.compareRows(rows.v1(), rows.v2()), greaterThan(0)); - rows = nonBytesRefRows( - true, - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - i - ); - assertThat(TopNOperator.compareRows(rows.v2(), rows.v1()), lessThan(0)); - rows = nonBytesRefRows( - false, - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - i - ); - assertThat(TopNOperator.compareRows(rows.v1(), rows.v2()), lessThan(0)); - rows = nonBytesRefRows( - false, - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - i - ); - assertThat(TopNOperator.compareRows(rows.v2(), rows.v1()), greaterThan(0)); - } + private List topNLong(List inputValues, int limit, boolean ascendingOrder, boolean nullsFirst) { + return topNTwoColumns( + inputValues.stream().map(v -> tuple(v, 0L)).toList(), + limit, + List.of(LONG, LONG), + List.of(DEFAULT_UNSORTABLE, DEFAULT_UNSORTABLE), + List.of(new TopNOperator.SortOrder(0, ascendingOrder, nullsFirst)) + ).stream().map(Tuple::v1).toList(); } - private Tuple nonBytesRefRows( - boolean asc, - boolean nullsFirst, - Function, TopNOperator.Row> row1, - Function, TopNOperator.Row> row2, - int position - ) { - return rows(asc, nullsFirst, row1, row2, position, DEFAULT_ENCODER); + public void testCompareInts() { + testCompare( + new Page( + new Block[] { + IntBlock.newBlockBuilder(2).appendInt(Integer.MIN_VALUE).appendInt(randomIntBetween(-1000, -1)).build(), + IntBlock.newBlockBuilder(2).appendInt(randomIntBetween(-1000, -1)).appendInt(0).build(), + 
IntBlock.newBlockBuilder(2).appendInt(0).appendInt(randomIntBetween(1, 1000)).build(), + IntBlock.newBlockBuilder(2).appendInt(randomIntBetween(1, 1000)).appendInt(Integer.MAX_VALUE).build(), + IntBlock.newBlockBuilder(2).appendInt(0).appendInt(Integer.MAX_VALUE).build() } + ), + INT, + DEFAULT_SORTABLE + ); } - private Tuple bytesRefRows( - boolean asc, - boolean nullsFirst, - Function, TopNOperator.Row> row1, - Function, TopNOperator.Row> row2, - int position - ) { - return rows(asc, nullsFirst, row1, row2, position, BYTESREF_UTF8_ENCODER); + public void testCompareLongs() { + testCompare( + new Page( + new Block[] { + LongBlock.newBlockBuilder(2).appendLong(Long.MIN_VALUE).appendLong(randomLongBetween(-1000, -1)).build(), + LongBlock.newBlockBuilder(2).appendLong(randomLongBetween(-1000, -1)).appendLong(0).build(), + LongBlock.newBlockBuilder(2).appendLong(0).appendLong(randomLongBetween(1, 1000)).build(), + LongBlock.newBlockBuilder(2).appendLong(randomLongBetween(1, 1000)).appendLong(Long.MAX_VALUE).build(), + LongBlock.newBlockBuilder(2).appendLong(0).appendLong(Long.MAX_VALUE).build() } + ), + LONG, + DEFAULT_SORTABLE + ); } - private Tuple rows( - boolean asc, - boolean nullsFirst, - Function, TopNOperator.Row> row1, - Function, TopNOperator.Row> row2, - int position, - TopNEncoder encoder - ) { - List so = List.of(new TopNOperator.SortOrder(position, asc, nullsFirst, encoder)); - return new Tuple<>(row1 == null ? null : row1.apply(so), row2 == null ? 
null : row2.apply(so)); + public void testCompareDoubles() { + testCompare( + new Page( + new Block[] { + DoubleBlock.newBlockBuilder(2) + .appendDouble(-Double.MAX_VALUE) + .appendDouble(randomDoubleBetween(-1000, -1, true)) + .build(), + DoubleBlock.newBlockBuilder(2).appendDouble(randomDoubleBetween(-1000, -1, true)).appendDouble(0.0).build(), + DoubleBlock.newBlockBuilder(2).appendDouble(0).appendDouble(randomDoubleBetween(1, 1000, true)).build(), + DoubleBlock.newBlockBuilder(2).appendDouble(randomLongBetween(1, 1000)).appendDouble(Double.MAX_VALUE).build(), + DoubleBlock.newBlockBuilder(2).appendDouble(0.0).appendDouble(Double.MAX_VALUE).build() } + ), + DOUBLE, + DEFAULT_SORTABLE + ); } - public void testCompareBytesRef() { - Block[] bs = new Block[] { - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("bye")).appendBytesRef(new BytesRef("hello")).build() }; - Page page = new Page(bs); - TopNOperator.RowFactory rowFactory = new TopNOperator.RowFactory(page); - - Tuple rows = bytesRefRows( - false, - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - null, - 0 + public void testCompareUtf8() { + testCompare( + new Page( + new Block[] { + BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("bye")).appendBytesRef(new BytesRef("hello")).build() } + ), + BYTES_REF, + UTF8 ); - assertEquals(0, TopNOperator.compareRows(rows.v1(), rows.v1())); - rows = bytesRefRows(false, randomBoolean(), so -> rowFactory.row(page, 1, null, so), null, 0); - assertEquals(0, TopNOperator.compareRows(rows.v1(), rows.v1())); - - rows = bytesRefRows(true, randomBoolean(), so -> rowFactory.row(page, 0, null, so), so -> rowFactory.row(page, 1, null, so), 0); - assertThat(TopNOperator.compareRows(rows.v1(), rows.v2()), greaterThan(0)); - rows = bytesRefRows(true, randomBoolean(), so -> rowFactory.row(page, 0, null, so), so -> rowFactory.row(page, 1, null, so), 0); - assertThat(TopNOperator.compareRows(rows.v2(), rows.v1()), lessThan(0)); - rows = 
bytesRefRows(false, randomBoolean(), so -> rowFactory.row(page, 0, null, so), so -> rowFactory.row(page, 1, null, so), 0); - assertThat(TopNOperator.compareRows(rows.v1(), rows.v2()), lessThan(0)); - rows = bytesRefRows(false, rarely(), so -> rowFactory.row(page, 0, null, so), so -> rowFactory.row(page, 1, null, so), 0); - assertThat(TopNOperator.compareRows(rows.v2(), rows.v1()), greaterThan(0)); } public void testCompareBooleans() { - Block[] bs = new Block[] { - BooleanBlock.newBlockBuilder(2).appendBoolean(false).appendBoolean(true).build(), - BooleanBlock.newBlockBuilder(2).appendBoolean(true).appendBoolean(false).build() }; - - Page page = new Page(bs); - TopNOperator.RowFactory rowFactory = new TopNOperator.RowFactory(page); - - Block nullBlock = Block.constantNullBlock(2); - Block[] nullBs = new Block[] { nullBlock, nullBlock }; - Page nullPage = new Page(nullBs); - TopNOperator.RowFactory nullRowFactory = new TopNOperator.RowFactory(page); - - Tuple rows = nonBytesRefRows( - randomBoolean(), - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - 0 - ); - assertEquals(0, TopNOperator.compareRows(rows.v1(), rows.v1())); - assertEquals(0, TopNOperator.compareRows(rows.v2(), rows.v2())); - - rows = nonBytesRefRows( - randomBoolean(), - true, - so -> rowFactory.row(page, 0, null, so), - so -> nullRowFactory.row(nullPage, 0, null, so), - 0 - ); - assertEquals(-1, TopNOperator.compareRows(rows.v1(), rows.v2())); - rows = nonBytesRefRows( - randomBoolean(), - false, - so -> rowFactory.row(page, 0, null, so), - so -> nullRowFactory.row(nullPage, 0, null, so), - 0 - ); - assertEquals(1, TopNOperator.compareRows(rows.v1(), rows.v2())); - rows = nonBytesRefRows( - randomBoolean(), - true, - so -> rowFactory.row(page, 0, null, so), - so -> nullRowFactory.row(nullPage, 0, null, so), - 0 - ); - assertEquals(1, TopNOperator.compareRows(rows.v2(), rows.v1())); - rows = nonBytesRefRows( - randomBoolean(), - false, - so -> 
rowFactory.row(page, 0, null, so), - so -> nullRowFactory.row(nullPage, 0, null, so), - 0 + testCompare( + new Page(new Block[] { BooleanBlock.newBlockBuilder(2).appendBoolean(false).appendBoolean(true).build() }), + BOOLEAN, + DEFAULT_SORTABLE ); - assertEquals(-1, TopNOperator.compareRows(rows.v2(), rows.v1())); - - for (int i = 0; i < bs.length - 1; i++) { - rows = nonBytesRefRows( - true, - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - 0 - ); - assertEquals(1, TopNOperator.compareRows(rows.v1(), rows.v2())); - rows = nonBytesRefRows( - true, - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - 0 - ); - assertEquals(-1, TopNOperator.compareRows(rows.v2(), rows.v1())); - rows = nonBytesRefRows( - false, - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - 0 - ); - assertEquals(-1, TopNOperator.compareRows(rows.v1(), rows.v2())); - rows = nonBytesRefRows( - false, - randomBoolean(), - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - 0 - ); - assertEquals(1, TopNOperator.compareRows(rows.v2(), rows.v1())); - } } - public void testCompareWithNulls() { - Block i1 = IntBlock.newBlockBuilder(2).appendInt(100).appendNull().build(); + private void testCompare(Page page, ElementType elementType, TopNEncoder encoder) { + Block nullBlock = Block.constantNullBlock(1); + Page nullPage = new Page(new Block[] { nullBlock, nullBlock, nullBlock, nullBlock, nullBlock }); - Page page = new Page(i1); - TopNOperator.RowFactory rowFactory = new TopNOperator.RowFactory(page); + for (int b = 0; b < page.getBlockCount(); b++) { + // Non-null identity + for (int p = 0; p < page.getPositionCount(); p++) { + TopNOperator.Row row = row(elementType, encoder, b, randomBoolean(), randomBoolean(), page, p); + assertEquals(0, TopNOperator.compareRows(row, row)); + } - Tuple rows = 
nonBytesRefRows( - randomBoolean(), - true, - so -> rowFactory.row(page, 0, null, so), - so -> rowFactory.row(page, 1, null, so), - 0 - ); - assertEquals(-1, TopNOperator.compareRows(rows.v1(), rows.v2())); - rows = nonBytesRefRows(randomBoolean(), true, so -> rowFactory.row(page, 0, null, so), so -> rowFactory.row(page, 1, null, so), 0); - assertEquals(1, TopNOperator.compareRows(rows.v2(), rows.v1())); - rows = nonBytesRefRows(randomBoolean(), false, so -> rowFactory.row(page, 0, null, so), so -> rowFactory.row(page, 1, null, so), 0); - assertEquals(1, TopNOperator.compareRows(rows.v1(), rows.v2())); - rows = nonBytesRefRows(randomBoolean(), false, so -> rowFactory.row(page, 0, null, so), so -> rowFactory.row(page, 1, null, so), 0); - assertEquals(-1, TopNOperator.compareRows(rows.v2(), rows.v1())); + // Null identity + for (int p = 0; p < page.getPositionCount(); p++) { + TopNOperator.Row row = row(elementType, encoder, b, randomBoolean(), randomBoolean(), nullPage, p); + assertEquals(0, TopNOperator.compareRows(row, row)); + } + + // nulls first + for (int p = 0; p < page.getPositionCount(); p++) { + boolean asc = randomBoolean(); + TopNOperator.Row nonNullRow = row(elementType, encoder, b, asc, true, page, p); + TopNOperator.Row nullRow = row(elementType, encoder, b, asc, true, nullPage, p); + assertEquals(-1, TopNOperator.compareRows(nonNullRow, nullRow)); + assertEquals(1, TopNOperator.compareRows(nullRow, nonNullRow)); + } + + // nulls last + for (int p = 0; p < page.getPositionCount(); p++) { + boolean asc = randomBoolean(); + TopNOperator.Row nonNullRow = row(elementType, encoder, b, asc, false, page, p); + TopNOperator.Row nullRow = row(elementType, encoder, b, asc, false, nullPage, p); + assertEquals(1, TopNOperator.compareRows(nonNullRow, nullRow)); + assertEquals(-1, TopNOperator.compareRows(nullRow, nonNullRow)); + } + + // ascending + { + boolean nullsFirst = randomBoolean(); + TopNOperator.Row r1 = row(elementType, encoder, b, true, nullsFirst, 
page, 0); + TopNOperator.Row r2 = row(elementType, encoder, b, true, nullsFirst, page, 1); + assertThat(TopNOperator.compareRows(r1, r2), greaterThan(0)); + assertThat(TopNOperator.compareRows(r2, r1), lessThan(0)); + } + // descending + { + boolean nullsFirst = randomBoolean(); + TopNOperator.Row r1 = row(elementType, encoder, b, false, nullsFirst, page, 0); + TopNOperator.Row r2 = row(elementType, encoder, b, false, nullsFirst, page, 1); + assertThat(TopNOperator.compareRows(r1, r2), lessThan(0)); + assertThat(TopNOperator.compareRows(r2, r1), greaterThan(0)); + } + } } - private List topN(List inputValues, int limit, boolean ascendingOrder, boolean nullsFirst) { - return topNTwoColumns( - inputValues.stream().map(v -> tuple(v, 0L)).toList(), - limit, - List.of(new TopNOperator.SortOrder(0, ascendingOrder, nullsFirst)) - ).stream().map(Tuple::v1).toList(); + private TopNOperator.Row row( + ElementType elementType, + TopNEncoder encoder, + int channel, + boolean asc, + boolean nullsFirst, + Page page, + int position + ) { + TopNOperator.RowFactory rf = new TopNOperator.RowFactory( + IntStream.range(0, page.getBlockCount()).mapToObj(i -> elementType).toList(), + IntStream.range(0, page.getBlockCount()).mapToObj(i -> encoder).toList(), + List.of(new TopNOperator.SortOrder(channel, asc, nullsFirst)), + page + ); + return rf.row(position, null); } public void testTopNTwoColumns() { List> values = Arrays.asList(tuple(1L, 1L), tuple(1L, 2L), tuple(null, null), tuple(null, 1L), tuple(1L, null)); assertThat( - topNTwoColumns(values, 5, List.of(new TopNOperator.SortOrder(0, true, false), new TopNOperator.SortOrder(1, true, false))), + topNTwoColumns( + values, + 5, + List.of(LONG, LONG), + List.of(TopNEncoder.DEFAULT_SORTABLE, TopNEncoder.DEFAULT_SORTABLE), + List.of(new TopNOperator.SortOrder(0, true, false), new TopNOperator.SortOrder(1, true, false)) + ), equalTo(List.of(tuple(1L, 1L), tuple(1L, 2L), tuple(1L, null), tuple(null, 1L), tuple(null, null))) ); assertThat( - 
topNTwoColumns(values, 5, List.of(new TopNOperator.SortOrder(0, true, true), new TopNOperator.SortOrder(1, true, false))), + topNTwoColumns( + values, + 5, + List.of(LONG, LONG), + List.of(TopNEncoder.DEFAULT_SORTABLE, TopNEncoder.DEFAULT_SORTABLE), + List.of(new TopNOperator.SortOrder(0, true, true), new TopNOperator.SortOrder(1, true, false)) + ), equalTo(List.of(tuple(null, 1L), tuple(null, null), tuple(1L, 1L), tuple(1L, 2L), tuple(1L, null))) ); assertThat( - topNTwoColumns(values, 5, List.of(new TopNOperator.SortOrder(0, true, false), new TopNOperator.SortOrder(1, true, true))), + topNTwoColumns( + values, + 5, + List.of(LONG, LONG), + List.of(TopNEncoder.DEFAULT_SORTABLE, TopNEncoder.DEFAULT_SORTABLE), + List.of(new TopNOperator.SortOrder(0, true, false), new TopNOperator.SortOrder(1, true, true)) + ), equalTo(List.of(tuple(1L, null), tuple(1L, 1L), tuple(1L, 2L), tuple(null, null), tuple(null, 1L))) ); } @@ -499,10 +424,19 @@ public void testCollectAllValues() { expectedTop.add(topKeys); blocks.add(keys); + List elementTypes = new ArrayList<>(); + List encoders = new ArrayList<>(); + + // Add the keys + elementTypes.add(INT); + encoders.add(DEFAULT_SORTABLE); + for (ElementType e : ElementType.values()) { if (e == ElementType.UNKNOWN) { continue; } + elementTypes.add(e); + encoders.add(e == BYTES_REF ? 
UTF8 : DEFAULT_UNSORTABLE); List eTop = new ArrayList<>(); Block.Builder builder = e.newBlockBuilder(size); for (int i = 0; i < size; i++) { @@ -526,7 +460,9 @@ public void testCollectAllValues() { List.of( new TopNOperator( topCount, - List.of(new TopNOperator.SortOrder(0, false, false, BYTESREF_UTF8_ENCODER)), + elementTypes, + encoders, + List.of(new TopNOperator.SortOrder(0, false, false)), randomPageSize() ) ), @@ -554,11 +490,20 @@ public void testCollectAllValues_RandomMultiValues() { expectedTop.add(topKeys); blocks.add(keys); + List elementTypes = new ArrayList<>(blocksCount); + List encoders = new ArrayList<>(blocksCount); + + // Add the keys + elementTypes.add(INT); + encoders.add(DEFAULT_UNSORTABLE); + for (int type = 0; type < blocksCount; type++) { ElementType e = randomFrom(ElementType.values()); if (e == ElementType.UNKNOWN) { continue; } + elementTypes.add(e); + encoders.add(e == BYTES_REF ? UTF8 : DEFAULT_SORTABLE); List eTop = new ArrayList<>(); Block.Builder builder = e.newBlockBuilder(rows); for (int i = 0; i < rows; i++) { @@ -600,7 +545,9 @@ public void testCollectAllValues_RandomMultiValues() { List.of( new TopNOperator( topCount, - List.of(new TopNOperator.SortOrder(0, false, false, BYTESREF_UTF8_ENCODER)), + elementTypes, + encoders, + List.of(new TopNOperator.SortOrder(0, false, false)), randomPageSize() ) ), @@ -618,6 +565,8 @@ public void testCollectAllValues_RandomMultiValues() { private List> topNTwoColumns( List> inputValues, int limit, + List elementTypes, + List encoder, List sortOrders ) { DriverContext driverContext = new DriverContext(); @@ -626,7 +575,7 @@ private List> topNTwoColumns( Driver driver = new Driver( driverContext, new TupleBlockSourceOperator(inputValues, randomIntBetween(1, 1000)), - List.of(new TopNOperator(limit, sortOrders, randomPageSize())), + List.of(new TopNOperator(limit, elementTypes, encoder, sortOrders, randomPageSize())), new PageConsumerOperator(page -> { LongBlock block1 = page.getBlock(0); 
LongBlock block2 = page.getBlock(1); @@ -645,21 +594,25 @@ private List> topNTwoColumns( } public void testTopNManyDescriptionAndToString() { + int fixedLength = between(1, 100); TopNOperator.TopNOperatorFactory factory = new TopNOperator.TopNOperatorFactory( 10, - List.of( - new TopNOperator.SortOrder(1, false, false, BYTESREF_UTF8_ENCODER), - new TopNOperator.SortOrder(3, false, true, BYTESREF_FIXED_LENGTH_ENCODER) - ), + List.of(BYTES_REF, BYTES_REF), + List.of(UTF8, new FixedLengthTopNEncoder(fixedLength)), + List.of(new TopNOperator.SortOrder(1, false, false), new TopNOperator.SortOrder(3, false, true)), randomPageSize() ); - String sorts = List.of( - "SortOrder[channel=1, asc=false, nullsFirst=false, encoder=UTF8TopNEncoder]", - "SortOrder[channel=3, asc=false, nullsFirst=true, encoder=FixedLengthTopNEncoder]" - ).stream().collect(Collectors.joining(", ")); - assertThat(factory.describe(), equalTo("TopNOperator[count = 10, sortOrders = [" + sorts + "]]")); + String sorts = List.of("SortOrder[channel=1, asc=false, nullsFirst=false]", "SortOrder[channel=3, asc=false, nullsFirst=true]") + .stream() + .collect(Collectors.joining(", ")); + String tail = ", elementTypes=[BYTES_REF, BYTES_REF], encoders=[UTF8TopNEncoder, FixedLengthTopNEncoder[" + + fixedLength + + "]], sortOrders=[" + + sorts + + "]]"; + assertThat(factory.describe(), equalTo("TopNOperator[count=10" + tail)); try (Operator operator = factory.get(new DriverContext())) { - assertThat(operator.toString(), equalTo("TopNOperator[count = 0/10, sortOrders = [" + sorts + "]]")); + assertThat(operator.toString(), equalTo("TopNOperator[count=0/10" + tail)); } } @@ -705,6 +658,7 @@ public void testTopNWithSortingOnSameField_DESC_then_ASC_int() { INT_MV, List.of(100, List.of(-1, 63, 2), List.of(63, 61, 62), 50, List.of(22, 21, 22)), INT, + DEFAULT_SORTABLE, new TopNOperator.SortOrder(0, false, false), new TopNOperator.SortOrder(0, true, false) ); @@ -728,6 +682,7 @@ public void 
testTopNWithSortingOnSameField_DESC_then_ASC_long() { LONG_MV, expectedValues, LONG, + DEFAULT_SORTABLE, new TopNOperator.SortOrder(0, false, false), new TopNOperator.SortOrder(0, true, false) ); @@ -751,6 +706,7 @@ public void testTopNWithSortingOnSameField_DESC_then_ASC_double() { DOUBLE_MV, expectedValues, DOUBLE, + DEFAULT_SORTABLE, new TopNOperator.SortOrder(0, false, false), new TopNOperator.SortOrder(0, true, false) ); @@ -761,6 +717,7 @@ public void testTopNWithSortingOnSameField_DESC_then_ASC_boolean() { BOOL_MV, List.of(List.of(true, false), List.of(true, false), true, List.of(true, true, true), List.of(false, false, false), false), BOOLEAN, + DEFAULT_SORTABLE, new TopNOperator.SortOrder(0, false, false), new TopNOperator.SortOrder(0, true, false) ); @@ -777,8 +734,9 @@ public void testTopNWithSortingOnSameField_DESC_then_ASC_BytesRef() { new BytesRef("100") ), BYTES_REF, - new TopNOperator.SortOrder(0, false, false, BYTESREF_UTF8_ENCODER), - new TopNOperator.SortOrder(0, true, false, BYTESREF_UTF8_ENCODER) + UTF8, + new TopNOperator.SortOrder(0, false, false), + new TopNOperator.SortOrder(0, true, false) ); } @@ -787,6 +745,7 @@ public void testTopNWithSortingOnSameField_ASC_then_DESC_int() { INT_MV, List.of(List.of(-1, 63, 2), List.of(22, 21, 22), 50, List.of(63, 61, 62), 100), INT, + DEFAULT_SORTABLE, new TopNOperator.SortOrder(0, true, false), new TopNOperator.SortOrder(0, false, false) ); @@ -810,6 +769,7 @@ public void testTopNWithSortingOnSameField_ASC_then_DESC_long() { LONG_MV, expectedValues, LONG, + DEFAULT_SORTABLE, new TopNOperator.SortOrder(0, true, false), new TopNOperator.SortOrder(0, false, false) ); @@ -833,6 +793,7 @@ public void testTopNWithSortingOnSameField_ASC_then_DESC_double() { DOUBLE_MV, expectedValues, DOUBLE, + DEFAULT_SORTABLE, new TopNOperator.SortOrder(0, true, false), new TopNOperator.SortOrder(0, false, false) ); @@ -849,8 +810,9 @@ public void testTopNWithSortingOnSameField_ASC_then_DESC_BytesRef() { List.of(new 
BytesRef("63"), new BytesRef("61"), new BytesRef("62")) ), BYTES_REF, - new TopNOperator.SortOrder(0, true, false, BYTESREF_UTF8_ENCODER), - new TopNOperator.SortOrder(0, false, false, BYTESREF_UTF8_ENCODER) + UTF8, + new TopNOperator.SortOrder(0, true, false), + new TopNOperator.SortOrder(0, false, false) ); } @@ -858,6 +820,7 @@ private void assertSortingOnMV( List> values, List expectedValues, ElementType blockType, + TopNEncoder encoder, TopNOperator.SortOrder... sortOrders ) { Block block = TestBlockBuilder.blockFromValues(values, blockType); @@ -870,7 +833,7 @@ private void assertSortingOnMV( Driver driver = new Driver( new DriverContext(), new CannedSourceOperator(List.of(page).iterator()), - List.of(new TopNOperator(topCount, List.of(sortOrders), randomPageSize())), + List.of(new TopNOperator(topCount, List.of(blockType), List.of(encoder), List.of(sortOrders), randomPageSize())), new PageConsumerOperator(p -> readInto(actualValues, p)), () -> {} ) @@ -889,7 +852,8 @@ public void testRandomMultiValuesTopN() { Set uniqueOrders = new LinkedHashSet<>(sortingByColumns); List>> expectedValues = new ArrayList<>(rows); List blocks = new ArrayList<>(blocksCount); - Map columnBytesRefEncoder = new HashMap<>(blocksCount); + List elementTypes = new ArrayList<>(blocksCount); + List encoders = new ArrayList<>(blocksCount); for (int i = 0; i < rows; i++) { expectedValues.add(new ArrayList<>(blocksCount)); @@ -900,6 +864,7 @@ public void testRandomMultiValuesTopN() { t -> t == ElementType.UNKNOWN || t == ElementType.DOC, () -> randomFrom(ElementType.values()) ); + elementTypes.add(e); Block.Builder builder = e.newBlockBuilder(rows); List previousValue = null; Function randomValueSupplier = (blockType) -> randomValue(blockType); @@ -908,17 +873,19 @@ public void testRandomMultiValuesTopN() { if (randomBoolean()) { // deal with IP fields (BytesRef block) like ES does and properly encode the ip addresses randomValueSupplier = (blockType) -> new 
BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))); + // use the right BytesRef encoder (don't touch the bytes) + encoders.add(TopNEncoder.IP); } else { // create a valid Version randomValueSupplier = (blockType) -> randomVersion().toBytesRef(); + // use the right BytesRef encoder (don't touch the bytes) + encoders.add(TopNEncoder.VERSION); } - // use the right BytesRef encoder (don't touch the bytes) - columnBytesRefEncoder.put(type, BYTESREF_FIXED_LENGTH_ENCODER); } else { - columnBytesRefEncoder.put(type, BYTESREF_UTF8_ENCODER); + encoders.add(UTF8); } } else { - columnBytesRefEncoder.put(type, DEFAULT_ENCODER); + encoders.add(DEFAULT_SORTABLE); } for (int i = 0; i < rows; i++) { @@ -935,7 +902,8 @@ public void testRandomMultiValuesTopN() { values.add(value); } } else {// null or single-valued value - values.add(randomValueSupplier.apply(e)); + Object value = randomValueSupplier.apply(e); + values.add(value); } if (usually() && randomBoolean()) { @@ -963,12 +931,12 @@ public void testRandomMultiValuesTopN() { // same "nulls" handling) while (uniqueOrders.size() < sortingByColumns) { int column = randomIntBetween(0, blocksCount - 1); - uniqueOrders.add(new TopNOperator.SortOrder(column, randomBoolean(), randomBoolean(), columnBytesRefEncoder.get(column))); + uniqueOrders.add(new TopNOperator.SortOrder(column, randomBoolean(), randomBoolean())); } List>> actualValues = new ArrayList<>(); List results = this.drive( - new TopNOperator(topCount, uniqueOrders.stream().toList(), rows), + new TopNOperator(topCount, elementTypes, encoders, uniqueOrders.stream().toList(), rows), List.of(new Page(blocks.toArray(Block[]::new))).iterator() ); for (Page p : results) { @@ -982,11 +950,6 @@ public void testRandomMultiValuesTopN() { List> actualReducedValues = extractAndReduceSortedValues(actualValues, uniqueOrders); List> expectedReducedValues = extractAndReduceSortedValues(topNExpectedValues, uniqueOrders); - assertThat(actualReducedValues.size(), 
equalTo(topNExpectedValues.size())); - assertThat(expectedReducedValues.size(), equalTo(topNExpectedValues.size())); - for (int i = 0; i < topNExpectedValues.size(); i++) { - assertThat(topNExpectedValues.get(i).size(), equalTo(actualValues.get(i).size())); - } assertMap(actualReducedValues, matchesList(expectedReducedValues)); } @@ -999,15 +962,20 @@ public void testIPSortingSingleValue() throws UnknownHostException { append(builder, new BytesRef(InetAddressPoint.encode(InetAddress.getByName(ip)))); } - Set orders = new HashSet<>(1); - orders.add(new TopNOperator.SortOrder(0, asc, randomBoolean(), BYTESREF_FIXED_LENGTH_ENCODER)); - List> actual = new ArrayList<>(); try ( Driver driver = new Driver( new DriverContext(), new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), - List.of(new TopNOperator(ips.size(), orders.stream().toList(), randomPageSize())), + List.of( + new TopNOperator( + ips.size(), + List.of(BYTES_REF), + List.of(TopNEncoder.IP), + List.of(new TopNOperator.SortOrder(0, asc, randomBoolean())), + randomPageSize() + ) + ), new PageConsumerOperator(p -> readInto(actual, p)), () -> {} ) @@ -1117,15 +1085,20 @@ private void assertIPSortingOnMultiValues( } } - Set orders = new HashSet<>(1); - orders.add(new TopNOperator.SortOrder(0, asc, nullsFirst, BYTESREF_FIXED_LENGTH_ENCODER)); - List> actual = new ArrayList<>(); try ( Driver driver = new Driver( new DriverContext(), new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), - List.of(new TopNOperator(ips.size(), orders.stream().toList(), randomPageSize())), + List.of( + new TopNOperator( + ips.size(), + List.of(BYTES_REF), + List.of(TopNEncoder.IP), + List.of(new TopNOperator.SortOrder(0, asc, nullsFirst)), + randomPageSize() + ) + ), new PageConsumerOperator(p -> readInto(actual, p)), () -> {} ) @@ -1192,16 +1165,24 @@ public void testZeroByte() { List blocks = new ArrayList<>(2); blocks.add(builderText.build()); blocks.add(builderInt.build()); - Set orders = new 
HashSet<>(2); - orders.add(new TopNOperator.SortOrder(0, true, randomBoolean(), BYTESREF_UTF8_ENCODER)); - orders.add(new TopNOperator.SortOrder(1, randomBoolean(), randomBoolean(), DEFAULT_ENCODER)); List> actual = new ArrayList<>(); try ( Driver driver = new Driver( new DriverContext(), new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), - List.of(new TopNOperator(2, orders.stream().toList(), randomPageSize())), + List.of( + new TopNOperator( + 2, + List.of(BYTES_REF, INT), + List.of(TopNEncoder.UTF8, DEFAULT_UNSORTABLE), + List.of( + new TopNOperator.SortOrder(0, true, randomBoolean()), + new TopNOperator.SortOrder(1, randomBoolean(), randomBoolean()) + ), + randomPageSize() + ) + ), new PageConsumerOperator(p -> readInto(actual, p)), () -> {} ) @@ -1318,7 +1299,7 @@ private int comparePositions(boolean asc, boolean nullsFirst, List value } } - private Version randomVersion() { + static Version randomVersion() { return new Version(randomFrom(VERSIONS)); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java new file mode 100644 index 0000000000000..be65cda3cce70 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator.topn; + +import org.apache.lucene.tests.util.RamUsageTester; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class TopNRowTests extends ESTestCase { + public void testRamBytesUsedEmpty() { + TopNOperator.Row row = new TopNOperator.Row(); + // We double count the shared empty array for empty rows. This overcounting is *fine*, but throws off the test. + assertThat(row.ramBytesUsed(), equalTo(RamUsageTester.ramUsed(row) + RamUsageTester.ramUsed(new byte[0]))); + } + + public void testRamBytesUsedSmall() { + TopNOperator.Row row = new TopNOperator.Row(); + row.keys.append(randomByte()); + row.values.append(randomByte()); + assertThat(row.ramBytesUsed(), equalTo(RamUsageTester.ramUsed(row))); + } + + public void testRamBytesUsedBig() { + TopNOperator.Row row = new TopNOperator.Row(); + for (int i = 0; i < 10000; i++) { + row.keys.append(randomByte()); + row.values.append(randomByte()); + } + assertThat(row.ramBytesUsed(), equalTo(RamUsageTester.ramUsed(row))); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/DefaultLayout.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/DefaultLayout.java index e3a520149108a..384615f6c19d4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/DefaultLayout.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/DefaultLayout.java @@ -9,10 +9,10 @@ import org.elasticsearch.xpack.ql.expression.NameId; -import java.util.HashMap; +import java.util.ArrayList; import java.util.HashSet; +import java.util.List; import java.util.Map; -import java.util.Set; class DefaultLayout implements Layout { private final Map layout; @@ -37,12 +37,22 @@ public int numberOfChannels() { } @Override - public Map> inverse() { - Map> inverse = new HashMap<>(); + public List inverse() { + List inverse = new ArrayList<>(numberOfChannels); + 
for (int i = 0; i < numberOfChannels; i++) { + inverse.add(null); + } for (Map.Entry entry : layout.entrySet()) { - NameId key = entry.getKey(); - Integer value = entry.getValue().channel(); - inverse.computeIfAbsent(value, k -> new HashSet<>()).add(key); + ChannelSet set = inverse.get(entry.getValue().channel()); + if (set == null) { + set = new ChannelSet(new HashSet<>(), entry.getValue().type()); + inverse.set(entry.getValue().channel(), set); + } else { + if (set.type() != entry.getValue().type()) { + throw new IllegalArgumentException(); + } + } + set.nameIds().add(entry.getKey()); } return inverse; } @@ -52,11 +62,11 @@ public Map> inverse() { */ @Override public Layout.Builder builder() { - return new Builder(numberOfChannels, layout); + return new Builder(inverse()); } @Override public String toString() { - return "Layout{" + "layout=" + layout + ", numberOfChannels=" + numberOfChannels + '}'; + return "Layout{layout=" + layout + ", numberOfChannels=" + numberOfChannels + '}'; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ExchangeLayout.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ExchangeLayout.java index 43bb36d070b79..b9227e1d638a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ExchangeLayout.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/ExchangeLayout.java @@ -10,8 +10,8 @@ import org.elasticsearch.xpack.ql.expression.NameId; import java.util.HashMap; +import java.util.List; import java.util.Map; -import java.util.Set; /** * Decorating layout that creates the NameId -> Value lazily based on the calls made to its content. 
@@ -19,7 +19,7 @@ */ class ExchangeLayout implements Layout { private final Layout delegate; - private final Map> inverse; + private final List inverse; private final Map mappingToOldLayout; private int counter; @@ -33,7 +33,7 @@ class ExchangeLayout implements Layout { public ChannelAndType get(NameId id) { var oldId = mappingToOldLayout.get(id); if (oldId == null && counter < inverse.size()) { - var names = inverse.get(counter++); + var names = inverse.get(counter++).nameIds(); for (var name : names) { oldId = name; mappingToOldLayout.put(id, oldId); @@ -58,7 +58,7 @@ public Builder builder() { } @Override - public Map> inverse() { + public List inverse() { throw new UnsupportedOperationException(); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java index d9df22fca5d21..8b91b9818a65a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java @@ -15,7 +15,6 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -52,33 +51,25 @@ record ChannelSet(Set nameIds, DataType type) {} */ Layout.Builder builder(); - Map> inverse(); + /** + * Build a list whose index is each channel id and who's values are + * all link {@link NameId}s at that position and their {@link DataType}. + */ + List inverse(); /** * Builder class for Layout. The builder ensures that layouts cannot be altered after creation (through references to the underlying * map). 
*/ class Builder { - private final List channels = new ArrayList<>(); + private final List channels; - public Builder() {} + public Builder() { + channels = new ArrayList<>(); + } - Builder(int numberOfChannels, Map layout) { - for (int i = 0; i < numberOfChannels; i++) { - channels.add(null); - } - for (Map.Entry entry : layout.entrySet()) { - ChannelSet set = channels.get(entry.getValue().channel); - if (set == null) { - set = new ChannelSet(new HashSet<>(), entry.getValue().type()); - channels.set(entry.getValue().channel, set); - } else { - if (set.type != entry.getValue().type()) { - throw new IllegalArgumentException(); - } - } - set.nameIds.add(entry.getKey()); - } + Builder(List channels) { + this.channels = channels; } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 467e04deb579d..b2f73a624b00c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -33,13 +33,13 @@ import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.SourceOperator.SourceOperatorFactory; import org.elasticsearch.compute.operator.StringExtractOperator; -import org.elasticsearch.compute.operator.TopNEncoder; -import org.elasticsearch.compute.operator.TopNOperator; -import org.elasticsearch.compute.operator.TopNOperator.TopNOperatorFactory; import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator.ExchangeSinkOperatorFactory; import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator.ExchangeSourceOperatorFactory; +import 
org.elasticsearch.compute.operator.topn.TopNEncoder; +import org.elasticsearch.compute.operator.topn.TopNOperator; +import org.elasticsearch.compute.operator.topn.TopNOperator.TopNOperatorFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.tasks.CancellableTask; @@ -82,6 +82,7 @@ import org.elasticsearch.xpack.ql.util.Holder; import java.util.ArrayList; +import java.util.Arrays; import java.util.BitSet; import java.util.HashMap; import java.util.HashSet; @@ -256,6 +257,9 @@ public static ElementType toElementType(DataType dataType) { if (dataType == DataTypes.BOOLEAN) { return ElementType.BOOLEAN; } + if (dataType == EsQueryExec.DOC_DATA_TYPE) { + return ElementType.DOC; + } throw EsqlIllegalArgumentException.illegalDataType(dataType); } @@ -325,6 +329,20 @@ private PhysicalOperation planExchangeSource(ExchangeSourceExec exchangeSource, private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(topNExec.child(), context); + ElementType[] elementTypes = new ElementType[source.layout.numberOfChannels()]; + TopNEncoder[] encoders = new TopNEncoder[source.layout.numberOfChannels()]; + List inverse = source.layout.inverse(); + for (int channel = 0; channel < inverse.size(); channel++) { + elementTypes[channel] = toElementType(inverse.get(channel).type()); + encoders[channel] = switch (inverse.get(channel).type().typeName()) { + case "ip" -> TopNEncoder.IP; + case "text", "keyword" -> TopNEncoder.UTF8; + case "version" -> TopNEncoder.VERSION; + case "boolean", "null", "byte", "short", "integer", "long", "double", "float", "half_float", "datetime", "date_period", + "time_duration", "object", "nested", "scaled_float", "unsigned_long", "_doc" -> TopNEncoder.DEFAULT_SORTABLE; + default -> throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + inverse.get(channel).type()); + }; + } List orders = 
topNExec.order().stream().map(order -> { int sortByChannel; if (order.child() instanceof Attribute a) { @@ -333,28 +351,10 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte throw new EsqlIllegalArgumentException("order by expression must be an attribute"); } - TopNEncoder encoder = switch (a.dataType().typeName()) { - case "ip": { - yield TopNOperator.BYTESREF_FIXED_LENGTH_ENCODER; - } - case "text", "keyword": { - yield TopNOperator.BYTESREF_UTF8_ENCODER; - } - case "version": { - yield TopNOperator.BYTESREF_FIXED_LENGTH_ENCODER; - } - case "boolean", "null", "byte", "short", "integer", "long", "double", "float", "half_float", "datetime", "date_period", - "time_duration", "object", "nested", "scaled_float", "unsigned_long": { - yield TopNOperator.DEFAULT_ENCODER; - } - default: - throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + a.dataType().typeName()); - }; return new TopNOperator.SortOrder( sortByChannel, order.direction().equals(Order.OrderDirection.ASC), - order.nullsPosition().equals(Order.NullsPosition.FIRST), - encoder + order.nullsPosition().equals(Order.NullsPosition.FIRST) ); }).toList(); @@ -374,7 +374,16 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte * That'll be more accurate. And we don't have a path for estimating * incoming rows. And we don't need one because we can estimate. 
*/ - return source.with(new TopNOperatorFactory(limit, orders, context.pageSize(2000 + topNExec.estimatedRowSize())), source.layout); + return source.with( + new TopNOperatorFactory( + limit, + Arrays.asList(elementTypes), + Arrays.asList(encoders), + orders, + context.pageSize(2000 + topNExec.estimatedRowSize()) + ), + source.layout + ); } private PhysicalOperation planEval(EvalExec eval, LocalExecutionPlannerContext context) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 24c7612cd1326..d10f58671d71c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -13,8 +13,12 @@ import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DocBlock; +import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; @@ -33,6 +37,7 @@ import java.util.List; import java.util.Random; import java.util.function.Supplier; +import java.util.stream.IntStream; import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween; import static java.util.stream.Collectors.joining; @@ -94,16 +99,15 @@ public Page getOutput() { finish(); } - Block[] fakeSourceAttributesBlocks = new Block[1]; - // a block 
that contains the position of each document as int - // will be used to "filter" and extract the block's values later on. Basically, a replacement for _doc, _shard and _segment ids - IntBlock.Builder docIndexBlockBuilder = IntBlock.newBlockBuilder(testData.getPositionCount()); - for (int i = 0; i < testData.getPositionCount(); i++) { - docIndexBlockBuilder.appendInt(i); - } - fakeSourceAttributesBlocks[0] = docIndexBlockBuilder.build(); // instead of _doc - Page newPageWithSourceAttributes = new Page(fakeSourceAttributesBlocks); - return newPageWithSourceAttributes; + return new Page( + new Block[] { + new DocVector( + IntBlock.newConstantBlockWith(0, testData.getPositionCount()).asVector(), + IntBlock.newConstantBlockWith(0, testData.getPositionCount()).asVector(), + new IntArrayVector(IntStream.range(0, testData.getPositionCount()).toArray(), testData.getPositionCount()), + true + ).asBlock() } + ); } @Override @@ -285,13 +289,12 @@ private Block extractBlockForColumn(Page page, String columnName) { if (columnIndex < 0) { throw new EsqlIllegalArgumentException("Cannot find column named [{}] in {}", columnName, columnNames); } - // this is the first block added by TestSourceOperator - IntBlock docIndexBlock = page.getBlock(0); - // use its filtered position to extract the data needed for "columnName" block + DocBlock docBlock = page.getBlock(0); + IntVector docIndices = docBlock.asVector().docs(); Block loadedBlock = testData.getBlock(columnIndex); - int[] filteredPositions = new int[docIndexBlock.getPositionCount()]; - for (int c = 0; c < docIndexBlock.getPositionCount(); c++) { - filteredPositions[c] = (Integer) docIndexBlock.getInt(c); + int[] filteredPositions = new int[docIndices.getPositionCount()]; + for (int c = 0; c < docIndices.getPositionCount(); c++) { + filteredPositions[c] = docIndices.getInt(c); } return loadedBlock.filter(filteredPositions); } From baf11a9d03ce72cca9c9afe1a1c42b2e2ec55ea5 Mon Sep 17 00:00:00 2001 From: Chris Hegarty 
<62058229+ChrisHegarty@users.noreply.github.com> Date: Wed, 13 Sep 2023 22:09:23 +0100 Subject: [PATCH 054/114] ESQL: Add DriverContext to the construction of Evaluators (#99518) This commit adds DriverContext to the construction of Evaluators. DriverContext is enriched to carry bigArrays, and will eventually carry a BlockFactory - it's the context for code requiring to create instances of blocks and big arrays. --- .../compute/operator/EvalBenchmark.java | 15 +++-- .../compute/gen/EvaluatorImplementer.java | 4 ++ .../compute/gen/MvEvaluatorImplementer.java | 4 ++ .../org/elasticsearch/compute/gen/Types.java | 2 + .../operator/ColumnExtractOperator.java | 5 +- .../compute/operator/DriverContext.java | 21 ++++++ .../compute/operator/EvalOperator.java | 14 ++-- .../compute/operator/FilterOperator.java | 8 +-- .../compute/operator/MultivalueDedupe.java | 20 +++--- .../operator/StringExtractOperator.java | 5 +- .../operator/ColumnExtractOperatorTests.java | 2 +- .../compute/operator/EvalOperatorTests.java | 2 +- .../compute/operator/FilterOperatorTests.java | 2 +- .../operator/StringExtractOperatorTests.java | 2 +- .../comparison/EqualsBoolsEvaluator.java | 6 +- .../comparison/EqualsDoublesEvaluator.java | 6 +- .../comparison/EqualsIntsEvaluator.java | 6 +- .../comparison/EqualsKeywordsEvaluator.java | 6 +- .../comparison/EqualsLongsEvaluator.java | 6 +- .../GreaterThanDoublesEvaluator.java | 6 +- .../comparison/GreaterThanIntsEvaluator.java | 6 +- .../GreaterThanKeywordsEvaluator.java | 6 +- .../comparison/GreaterThanLongsEvaluator.java | 6 +- .../GreaterThanOrEqualDoublesEvaluator.java | 6 +- .../GreaterThanOrEqualIntsEvaluator.java | 6 +- .../GreaterThanOrEqualKeywordsEvaluator.java | 6 +- .../GreaterThanOrEqualLongsEvaluator.java | 6 +- .../comparison/LessThanDoublesEvaluator.java | 6 +- .../comparison/LessThanIntsEvaluator.java | 6 +- .../comparison/LessThanKeywordsEvaluator.java | 6 +- .../comparison/LessThanLongsEvaluator.java | 6 +- 
.../LessThanOrEqualDoublesEvaluator.java | 6 +- .../LessThanOrEqualIntsEvaluator.java | 6 +- .../LessThanOrEqualKeywordsEvaluator.java | 6 +- .../LessThanOrEqualLongsEvaluator.java | 6 +- .../comparison/NotEqualsBoolsEvaluator.java | 6 +- .../comparison/NotEqualsDoublesEvaluator.java | 6 +- .../comparison/NotEqualsIntsEvaluator.java | 6 +- .../NotEqualsKeywordsEvaluator.java | 6 +- .../comparison/NotEqualsLongsEvaluator.java | 6 +- .../operator/logical/NotEvaluator.java | 6 +- .../operator/regex/RegexMatchEvaluator.java | 8 ++- .../conditional/GreatestBooleanEvaluator.java | 7 +- .../GreatestBytesRefEvaluator.java | 7 +- .../conditional/GreatestDoubleEvaluator.java | 7 +- .../conditional/GreatestIntEvaluator.java | 7 +- .../conditional/GreatestLongEvaluator.java | 7 +- .../conditional/LeastBooleanEvaluator.java | 7 +- .../conditional/LeastBytesRefEvaluator.java | 7 +- .../conditional/LeastDoubleEvaluator.java | 7 +- .../scalar/conditional/LeastIntEvaluator.java | 6 +- .../conditional/LeastLongEvaluator.java | 7 +- .../date/DateExtractConstantEvaluator.java | 6 +- .../scalar/date/DateExtractEvaluator.java | 6 +- .../date/DateFormatConstantEvaluator.java | 8 ++- .../scalar/date/DateFormatEvaluator.java | 6 +- .../date/DateParseConstantEvaluator.java | 6 +- .../scalar/date/DateParseEvaluator.java | 6 +- .../scalar/date/DateTruncEvaluator.java | 7 +- .../function/scalar/date/NowEvaluator.java | 6 +- .../scalar/ip/CIDRMatchEvaluator.java | 6 +- .../scalar/math/AbsDoubleEvaluator.java | 7 +- .../function/scalar/math/AbsIntEvaluator.java | 6 +- .../scalar/math/AbsLongEvaluator.java | 6 +- .../function/scalar/math/AcosEvaluator.java | 7 +- .../function/scalar/math/AsinEvaluator.java | 7 +- .../function/scalar/math/Atan2Evaluator.java | 7 +- .../function/scalar/math/AtanEvaluator.java | 6 +- .../scalar/math/CastIntToDoubleEvaluator.java | 6 +- .../scalar/math/CastIntToLongEvaluator.java | 6 +- .../math/CastIntToUnsignedLongEvaluator.java | 7 +- 
.../math/CastLongToDoubleEvaluator.java | 7 +- .../math/CastLongToUnsignedLongEvaluator.java | 7 +- .../CastUnsignedLongToDoubleEvaluator.java | 7 +- .../scalar/math/CeilDoubleEvaluator.java | 6 +- .../function/scalar/math/CosEvaluator.java | 6 +- .../function/scalar/math/CoshEvaluator.java | 7 +- .../scalar/math/FloorDoubleEvaluator.java | 6 +- .../scalar/math/IsFiniteEvaluator.java | 6 +- .../scalar/math/IsInfiniteEvaluator.java | 6 +- .../function/scalar/math/IsNaNEvaluator.java | 6 +- .../scalar/math/Log10DoubleEvaluator.java | 7 +- .../scalar/math/Log10IntEvaluator.java | 7 +- .../scalar/math/Log10LongEvaluator.java | 7 +- .../math/Log10UnsignedLongEvaluator.java | 7 +- .../scalar/math/PowDoubleEvaluator.java | 6 +- .../function/scalar/math/PowIntEvaluator.java | 6 +- .../scalar/math/PowLongEvaluator.java | 6 +- .../scalar/math/RoundDoubleEvaluator.java | 6 +- .../math/RoundDoubleNoDecimalsEvaluator.java | 7 +- .../scalar/math/RoundIntEvaluator.java | 6 +- .../scalar/math/RoundLongEvaluator.java | 6 +- .../math/RoundUnsignedLongEvaluator.java | 6 +- .../function/scalar/math/SinEvaluator.java | 6 +- .../function/scalar/math/SinhEvaluator.java | 7 +- .../scalar/math/SqrtDoubleEvaluator.java | 7 +- .../scalar/math/SqrtIntEvaluator.java | 7 +- .../scalar/math/SqrtLongEvaluator.java | 7 +- .../math/SqrtUnsignedLongEvaluator.java | 7 +- .../function/scalar/math/TanEvaluator.java | 6 +- .../function/scalar/math/TanhEvaluator.java | 6 +- .../multivalue/MvAvgDoubleEvaluator.java | 6 +- .../scalar/multivalue/MvAvgIntEvaluator.java | 6 +- .../scalar/multivalue/MvAvgLongEvaluator.java | 6 +- .../MvAvgUnsignedLongEvaluator.java | 7 +- .../multivalue/MvMaxBooleanEvaluator.java | 7 +- .../multivalue/MvMaxBytesRefEvaluator.java | 7 +- .../multivalue/MvMaxDoubleEvaluator.java | 6 +- .../scalar/multivalue/MvMaxIntEvaluator.java | 6 +- .../scalar/multivalue/MvMaxLongEvaluator.java | 6 +- .../multivalue/MvMedianDoubleEvaluator.java | 7 +- .../multivalue/MvMedianIntEvaluator.java 
| 6 +- .../multivalue/MvMedianLongEvaluator.java | 7 +- .../MvMedianUnsignedLongEvaluator.java | 7 +- .../multivalue/MvMinBooleanEvaluator.java | 7 +- .../multivalue/MvMinBytesRefEvaluator.java | 7 +- .../multivalue/MvMinDoubleEvaluator.java | 6 +- .../scalar/multivalue/MvMinIntEvaluator.java | 6 +- .../scalar/multivalue/MvMinLongEvaluator.java | 6 +- .../multivalue/MvSumDoubleEvaluator.java | 6 +- .../scalar/multivalue/MvSumIntEvaluator.java | 7 +- .../scalar/multivalue/MvSumLongEvaluator.java | 7 +- .../MvSumUnsignedLongEvaluator.java | 7 +- .../scalar/string/ConcatEvaluator.java | 7 +- .../scalar/string/LTrimEvaluator.java | 6 +- .../function/scalar/string/LeftEvaluator.java | 7 +- .../scalar/string/LengthEvaluator.java | 6 +- .../scalar/string/RTrimEvaluator.java | 6 +- .../scalar/string/RightEvaluator.java | 7 +- .../string/SplitSingleByteEvaluator.java | 6 +- .../scalar/string/SplitVariableEvaluator.java | 6 +- .../scalar/string/StartsWithEvaluator.java | 6 +- .../scalar/string/SubstringEvaluator.java | 7 +- .../string/SubstringNoLengthEvaluator.java | 6 +- .../function/scalar/string/TrimEvaluator.java | 6 +- .../arithmetic/AddDatetimesEvaluator.java | 6 +- .../arithmetic/AddDoublesEvaluator.java | 6 +- .../operator/arithmetic/AddIntsEvaluator.java | 6 +- .../arithmetic/AddLongsEvaluator.java | 6 +- .../arithmetic/AddUnsignedLongsEvaluator.java | 6 +- .../arithmetic/DivDoublesEvaluator.java | 6 +- .../operator/arithmetic/DivIntsEvaluator.java | 6 +- .../arithmetic/DivLongsEvaluator.java | 6 +- .../arithmetic/DivUnsignedLongsEvaluator.java | 6 +- .../arithmetic/ModDoublesEvaluator.java | 6 +- .../operator/arithmetic/ModIntsEvaluator.java | 6 +- .../arithmetic/ModLongsEvaluator.java | 6 +- .../arithmetic/ModUnsignedLongsEvaluator.java | 6 +- .../arithmetic/MulDoublesEvaluator.java | 6 +- .../operator/arithmetic/MulIntsEvaluator.java | 6 +- .../arithmetic/MulLongsEvaluator.java | 6 +- .../arithmetic/MulUnsignedLongsEvaluator.java | 6 +- 
.../arithmetic/NegDoublesEvaluator.java | 6 +- .../operator/arithmetic/NegIntsEvaluator.java | 7 +- .../arithmetic/NegLongsEvaluator.java | 7 +- .../arithmetic/SubDatetimesEvaluator.java | 6 +- .../arithmetic/SubDoublesEvaluator.java | 6 +- .../operator/arithmetic/SubIntsEvaluator.java | 6 +- .../arithmetic/SubLongsEvaluator.java | 6 +- .../arithmetic/SubUnsignedLongsEvaluator.java | 6 +- .../xpack/esql/evaluator/EvalMapper.java | 40 ++++++------ .../evaluator/mapper/EvaluatorMapper.java | 9 ++- .../evaluator/mapper/ExpressionMapper.java | 6 +- .../operator/comparison/ComparisonMapper.java | 65 +++++++++---------- .../operator/comparison/InMapper.java | 10 +-- .../predicate/operator/regex/RegexMapper.java | 13 ++-- .../function/scalar/conditional/Case.java | 27 ++++---- .../function/scalar/conditional/Greatest.java | 49 +++++++++----- .../function/scalar/conditional/Least.java | 49 +++++++++----- .../convert/AbstractConvertFunction.java | 11 ++-- .../function/scalar/date/DateExtract.java | 21 +++--- .../function/scalar/date/DateFormat.java | 22 ++++--- .../function/scalar/date/DateParse.java | 17 ++--- .../function/scalar/date/DateTrunc.java | 16 ++--- .../expression/function/scalar/date/Now.java | 9 +-- .../function/scalar/ip/CIDRMatch.java | 15 ++--- .../expression/function/scalar/math/Abs.java | 15 ++--- .../math/AbstractTrigonometricFunction.java | 13 ++-- .../expression/function/scalar/math/Acos.java | 5 +- .../expression/function/scalar/math/Asin.java | 5 +- .../expression/function/scalar/math/Atan.java | 5 +- .../function/scalar/math/Atan2.java | 13 ++-- .../function/scalar/math/AutoBucket.java | 7 +- .../expression/function/scalar/math/Cast.java | 24 +++---- .../expression/function/scalar/math/Ceil.java | 11 ++-- .../expression/function/scalar/math/Cos.java | 5 +- .../expression/function/scalar/math/Cosh.java | 5 +- .../function/scalar/math/Floor.java | 11 ++-- .../function/scalar/math/IsFinite.java | 11 ++-- .../function/scalar/math/IsInfinite.java | 11 
++-- .../function/scalar/math/IsNaN.java | 11 ++-- .../function/scalar/math/Log10.java | 18 ++--- .../expression/function/scalar/math/Pow.java | 28 ++++---- .../function/scalar/math/Round.java | 39 ++++++----- .../expression/function/scalar/math/Sin.java | 5 +- .../expression/function/scalar/math/Sinh.java | 5 +- .../expression/function/scalar/math/Sqrt.java | 18 ++--- .../expression/function/scalar/math/Tan.java | 5 +- .../expression/function/scalar/math/Tanh.java | 5 +- .../AbstractMultivalueFunction.java | 9 +-- .../function/scalar/multivalue/MvAvg.java | 14 ++-- .../function/scalar/multivalue/MvConcat.java | 12 ++-- .../function/scalar/multivalue/MvCount.java | 6 +- .../function/scalar/multivalue/MvDedupe.java | 5 +- .../function/scalar/multivalue/MvMax.java | 16 ++--- .../function/scalar/multivalue/MvMedian.java | 13 ++-- .../function/scalar/multivalue/MvMin.java | 16 ++--- .../function/scalar/multivalue/MvSum.java | 14 ++-- .../function/scalar/nulls/Coalesce.java | 11 ++-- .../function/scalar/string/Concat.java | 13 ++-- .../function/scalar/string/LTrim.java | 11 ++-- .../function/scalar/string/Left.java | 16 ++--- .../function/scalar/string/Length.java | 11 ++-- .../function/scalar/string/RTrim.java | 11 ++-- .../function/scalar/string/Right.java | 16 ++--- .../function/scalar/string/Split.java | 15 ++--- .../function/scalar/string/StartsWith.java | 13 ++-- .../function/scalar/string/Substring.java | 17 ++--- .../function/scalar/string/Trim.java | 11 ++-- .../predicate/operator/arithmetic/Add.java | 2 +- .../DateTimeArithmeticOperation.java | 22 +++++-- .../predicate/operator/arithmetic/Div.java | 2 +- .../arithmetic/EsqlArithmeticOperation.java | 22 ++++--- .../predicate/operator/arithmetic/Mod.java | 2 +- .../predicate/operator/arithmetic/Mul.java | 2 +- .../predicate/operator/arithmetic/Neg.java | 11 ++-- .../predicate/operator/arithmetic/Sub.java | 2 +- .../esql/planner/LocalExecutionPlanner.java | 8 +-- .../function/AbstractFunctionTestCase.java | 15 
+++-- .../scalar/conditional/CaseTests.java | 9 ++- .../function/scalar/math/RoundTests.java | 10 ++- .../AbstractMultivalueFunctionTestCase.java | 3 +- .../scalar/multivalue/MvConcatTests.java | 22 +++++-- .../function/scalar/nulls/CoalesceTests.java | 5 +- .../function/scalar/string/ConcatTests.java | 5 +- .../function/scalar/string/LeftTests.java | 3 +- .../function/scalar/string/RightTests.java | 3 +- .../function/scalar/string/SplitTests.java | 3 +- .../scalar/string/SubstringTests.java | 8 ++- .../AbstractBinaryOperatorTestCase.java | 3 +- .../operator/arithmetic/NegTests.java | 3 +- .../xpack/esql/planner/EvalMapperTests.java | 8 +-- 242 files changed, 1382 insertions(+), 727 deletions(-) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index e805958d04e78..ce839a4c8eace 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.core.TimeValue; @@ -79,14 +80,14 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) { return switch (operation) { case "abs" -> { FieldAttribute longField = longField(); - yield EvalMapper.toEvaluator(new Abs(Source.EMPTY, longField), layout(longField)).get(); + yield EvalMapper.toEvaluator(new Abs(Source.EMPTY, longField), layout(longField)).get(new DriverContext()); } case "add" -> { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator( new Add(Source.EMPTY, 
longField, new Literal(Source.EMPTY, 1L, DataTypes.LONG)), layout(longField) - ).get(); + ).get(new DriverContext()); } case "date_trunc" -> { FieldAttribute timestamp = new FieldAttribute( @@ -97,28 +98,28 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) { yield EvalMapper.toEvaluator( new DateTrunc(Source.EMPTY, new Literal(Source.EMPTY, Duration.ofHours(24), EsqlDataTypes.TIME_DURATION), timestamp), layout(timestamp) - ).get(); + ).get(new DriverContext()); } case "equal_to_const" -> { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator( new Equals(Source.EMPTY, longField, new Literal(Source.EMPTY, 100_000L, DataTypes.LONG)), layout(longField) - ).get(); + ).get(new DriverContext()); } case "long_equal_to_long" -> { FieldAttribute lhs = longField(); FieldAttribute rhs = longField(); - yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(); + yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(new DriverContext()); } case "long_equal_to_int" -> { FieldAttribute lhs = longField(); FieldAttribute rhs = intField(); - yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(); + yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(new DriverContext()); } case "mv_min", "mv_min_ascending" -> { FieldAttribute longField = longField(); - yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get(); + yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get(new DriverContext()); } default -> throw new UnsupportedOperationException(); }; diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index 4ea351084bcc4..92601a232a68b 100644 --- 
a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -34,6 +34,7 @@ import static org.elasticsearch.compute.gen.Methods.getMethod; import static org.elasticsearch.compute.gen.Types.BLOCK; import static org.elasticsearch.compute.gen.Types.BYTES_REF; +import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; import static org.elasticsearch.compute.gen.Types.PAGE; import static org.elasticsearch.compute.gen.Types.SOURCE; @@ -77,6 +78,7 @@ private TypeSpec type() { builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL); } processFunction.args.stream().forEach(a -> a.declareField(builder)); + builder.addField(DRIVER_CONTEXT, "driverContext", Modifier.PRIVATE, Modifier.FINAL); builder.addMethod(ctor()); builder.addMethod(eval()); @@ -95,6 +97,8 @@ private MethodSpec ctor() { builder.addStatement("this.warnings = new Warnings(source)"); } processFunction.args.stream().forEach(a -> a.implementCtor(builder)); + builder.addParameter(DRIVER_CONTEXT, "driverContext"); + builder.addStatement("this.driverContext = driverContext"); return builder.build(); } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java index 124179b7447e8..9547548ba42ae 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java @@ -35,6 +35,7 @@ import static org.elasticsearch.compute.gen.Types.BLOCK; import static org.elasticsearch.compute.gen.Types.BYTES_REF; import static 
org.elasticsearch.compute.gen.Types.BYTES_REF_ARRAY; +import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; import static org.elasticsearch.compute.gen.Types.SOURCE; import static org.elasticsearch.compute.gen.Types.VECTOR; @@ -129,6 +130,7 @@ private TypeSpec type() { builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL); } + builder.addField(DRIVER_CONTEXT, "driverContext", Modifier.PRIVATE, Modifier.FINAL); builder.addMethod(ctor()); builder.addMethod(name()); @@ -159,6 +161,8 @@ private MethodSpec ctor() { if (warnExceptions.isEmpty() == false) { builder.addStatement("this.warnings = new Warnings(source)"); } + builder.addParameter(DRIVER_CONTEXT, "driverContext"); + builder.addStatement("this.driverContext = driverContext"); return builder.build(); } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java index 2f76d1a73e480..4f98c10598bb8 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java @@ -88,6 +88,8 @@ public class Types { static final ClassName INTERMEDIATE_STATE_DESC = ClassName.get(AGGREGATION_PACKAGE, "IntermediateStateDesc"); static final TypeName LIST_AGG_FUNC_DESC = ParameterizedTypeName.get(ClassName.get(List.class), INTERMEDIATE_STATE_DESC); + static final ClassName DRIVER_CONTEXT = ClassName.get(OPERATOR_PACKAGE, "DriverContext"); + static final ClassName EXPRESSION_EVALUATOR = ClassName.get(OPERATOR_PACKAGE, "EvalOperator", "ExpressionEvaluator"); static final ClassName ABSTRACT_MULTIVALUE_FUNCTION_EVALUATOR = ClassName.get( "org.elasticsearch.xpack.esql.expression.function.scalar.multivalue", diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnExtractOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnExtractOperator.java index 8e5244cb75226..3facf9edc7765 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnExtractOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnExtractOperator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import java.util.function.Supplier; @@ -19,13 +20,13 @@ public class ColumnExtractOperator extends AbstractPageMappingOperator { public record Factory( ElementType[] types, - Supplier inputEvalSupplier, + ExpressionEvaluator.Factory inputEvalSupplier, Supplier evaluatorSupplier ) implements OperatorFactory { @Override public Operator get(DriverContext driverContext) { - return new ColumnExtractOperator(types, inputEvalSupplier.get(), evaluatorSupplier.get()); + return new ColumnExtractOperator(types, inputEvalSupplier.get(driverContext), evaluatorSupplier.get()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java index 6512c417b91ca..4e95e582769b5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java @@ -7,10 +7,12 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.Releasable; import java.util.Collections; import java.util.IdentityHashMap; +import java.util.Objects; import 
java.util.Set; import java.util.concurrent.atomic.AtomicReference; @@ -33,11 +35,30 @@ */ public class DriverContext { + /** A default driver context. The returned bigArrays is non recycling. */ + public static DriverContext DEFAULT = new DriverContext(BigArrays.NON_RECYCLING_INSTANCE); + // Working set. Only the thread executing the driver will update this set. Set workingSet = Collections.newSetFromMap(new IdentityHashMap<>()); private final AtomicReference snapshot = new AtomicReference<>(); + private final BigArrays bigArrays; + + // For testing + public DriverContext() { + this(BigArrays.NON_RECYCLING_INSTANCE); + } + + public DriverContext(BigArrays bigArrays) { + Objects.requireNonNull(bigArrays); + this.bigArrays = bigArrays; + } + + public BigArrays bigArrays() { + return bigArrays; + } + /** A snapshot of the driver context. */ public record Snapshot(Set releasables) {} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java index f15686897c008..7202f05b5562a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java @@ -10,24 +10,22 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Page; -import java.util.function.Supplier; - /** * Evaluates a tree of functions for every position in the block, resulting in a * new block which is appended to the page. 
*/ public class EvalOperator extends AbstractPageMappingOperator { - public record EvalOperatorFactory(Supplier evaluator) implements OperatorFactory { + public record EvalOperatorFactory(ExpressionEvaluator.Factory evaluator) implements OperatorFactory { @Override public Operator get(DriverContext driverContext) { - return new EvalOperator(evaluator.get()); + return new EvalOperator(evaluator.get(driverContext)); } @Override public String describe() { - return "EvalOperator[evaluator=" + evaluator.get() + "]"; + return "EvalOperator[evaluator=" + evaluator.get(DriverContext.DEFAULT) + "]"; } } @@ -48,6 +46,12 @@ public String toString() { } public interface ExpressionEvaluator { + + /** A Factory for creating ExpressionEvaluators. */ + interface Factory { + ExpressionEvaluator get(DriverContext driverContext); + } + Block eval(Page page); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java index 61e7c25d1000b..20864373e8016 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java @@ -10,24 +10,24 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import java.util.Arrays; -import java.util.function.Supplier; public class FilterOperator extends AbstractPageMappingOperator { private final EvalOperator.ExpressionEvaluator evaluator; - public record FilterOperatorFactory(Supplier evaluatorSupplier) implements OperatorFactory { + public record FilterOperatorFactory(ExpressionEvaluator.Factory evaluatorSupplier) implements OperatorFactory { @Override public Operator get(DriverContext driverContext) 
{ - return new FilterOperator(evaluatorSupplier.get()); + return new FilterOperator(evaluatorSupplier.get(driverContext)); } @Override public String describe() { - return "FilterOperator[evaluator=" + evaluatorSupplier.get() + "]"; + return "FilterOperator[evaluator=" + evaluatorSupplier.get(DriverContext.DEFAULT) + "]"; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java index 7cfb080dc8c3e..03cd1442e3b9e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java @@ -15,8 +15,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; - -import java.util.function.Supplier; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; /** * Utilities to remove duplicates from multivalued fields. @@ -77,42 +76,39 @@ public static Block dedupeToBlockUsingCopyAndSort(Block block) { * Build and {@link EvalOperator.ExpressionEvaluator} that deduplicates values * using an adaptive algorithm based on the size of the input list. 
*/ - public static Supplier evaluator( - ElementType elementType, - Supplier nextSupplier - ) { + public static ExpressionEvaluator.Factory evaluator(ElementType elementType, ExpressionEvaluator.Factory nextSupplier) { return switch (elementType) { - case BOOLEAN -> () -> new MvDedupeEvaluator(nextSupplier.get()) { + case BOOLEAN -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override public Block eval(Page page) { return new MultivalueDedupeBoolean((BooleanBlock) field.eval(page)).dedupeToBlock(); } }; - case BYTES_REF -> () -> new MvDedupeEvaluator(nextSupplier.get()) { + case BYTES_REF -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override public Block eval(Page page) { return new MultivalueDedupeBytesRef((BytesRefBlock) field.eval(page)).dedupeToBlockAdaptive(); } }; - case INT -> () -> new MvDedupeEvaluator(nextSupplier.get()) { + case INT -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override public Block eval(Page page) { return new MultivalueDedupeInt((IntBlock) field.eval(page)).dedupeToBlockAdaptive(); } }; - case LONG -> () -> new MvDedupeEvaluator(nextSupplier.get()) { + case LONG -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override public Block eval(Page page) { return new MultivalueDedupeLong((LongBlock) field.eval(page)).dedupeToBlockAdaptive(); } }; - case DOUBLE -> () -> new MvDedupeEvaluator(nextSupplier.get()) { + case DOUBLE -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override public Block eval(Page page) { return new MultivalueDedupeDouble((DoubleBlock) field.eval(page)).dedupeToBlockAdaptive(); } }; - case NULL -> () -> new MvDedupeEvaluator(nextSupplier.get()) { + case NULL -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { @Override public Block eval(Page page) { return field.eval(page); // The page is all nulls and when you dedupe that it's still all nulls diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java index d512f40e0dcbb..92ec89a12aa78 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/StringExtractOperator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import java.util.Arrays; import java.util.Map; @@ -24,13 +25,13 @@ public class StringExtractOperator extends AbstractPageMappingOperator { public record StringExtractOperatorFactory( String[] fieldNames, - Supplier expressionEvaluator, + ExpressionEvaluator.Factory expressionEvaluator, Supplier>> parserSupplier ) implements OperatorFactory { @Override public Operator get(DriverContext driverContext) { - return new StringExtractOperator(fieldNames, expressionEvaluator.get(), parserSupplier.get()); + return new StringExtractOperator(fieldNames, expressionEvaluator.get(driverContext), parserSupplier.get()); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java index da67f9e6a68c0..c9b8dae1f9671 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java @@ -47,7 +47,7 @@ public String toString() { @Override protected Operator.OperatorFactory simple(BigArrays bigArrays) { Supplier expEval = () -> new 
FirstWord(0); - return new ColumnExtractOperator.Factory(new ElementType[] { ElementType.BYTES_REF }, () -> page -> page.getBlock(0), expEval); + return new ColumnExtractOperator.Factory(new ElementType[] { ElementType.BYTES_REF }, dvrCtx -> page -> page.getBlock(0), expEval); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index 2143e77d3ffc6..486a4be23f4c7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -40,7 +40,7 @@ public Block eval(Page page) { @Override protected Operator.OperatorFactory simple(BigArrays bigArrays) { - return new EvalOperator.EvalOperatorFactory(() -> new Addition(0, 1)); + return new EvalOperator.EvalOperatorFactory(dvrCtx -> new Addition(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index 4724e09324fd5..146bafcd628e8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -42,7 +42,7 @@ public Block eval(Page page) { @Override protected Operator.OperatorFactory simple(BigArrays bigArrays) { - return new FilterOperator.FilterOperatorFactory(() -> new SameLastDigit(0, 1)); + return new FilterOperator.FilterOperatorFactory(dvrCtx -> new SameLastDigit(0, 1)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index 12bc5da607934..70cef5adec308 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -42,7 +42,7 @@ public Map apply(String s) { @Override protected Operator.OperatorFactory simple(BigArrays bigArrays) { Supplier>> expEval = () -> new FirstWord("test"); - return new StringExtractOperator.StringExtractOperatorFactory(new String[] { "test" }, () -> page -> page.getBlock(0), expEval); + return new StringExtractOperator.StringExtractOperatorFactory(new String[] { "test" }, dvrCtx -> page -> page.getBlock(0), expEval); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java index 1afe66b50d318..7e74b54b74086 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,10 +22,13 @@ public final class EqualsBoolsEvaluator implements EvalOperator.ExpressionEvalua private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public EqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator 
lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java index 296fba7c86d4f..0f52038f67ec7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class EqualsDoublesEvaluator implements EvalOperator.ExpressionEval private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public EqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java index eb45790ce8cec..10491dcdf73dd 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class EqualsIntsEvaluator implements EvalOperator.ExpressionEvaluat private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public EqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java index d3c0ae0cb447d..aa2d09be9bf72 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,10 +25,13 @@ public final class 
EqualsKeywordsEvaluator implements EvalOperator.ExpressionEva private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public EqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java index 057e248176ca4..ad262b88c1641 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class EqualsLongsEvaluator implements EvalOperator.ExpressionEvalua private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public EqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java index d351c5a4b4dc9..ed6da288d1e75 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class GreaterThanDoublesEvaluator implements EvalOperator.Expressio private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public GreaterThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java index ce6ca2dc10633..2ae3c956c5199 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; 
import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class GreaterThanIntsEvaluator implements EvalOperator.ExpressionEv private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public GreaterThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java index caf53df3cef0c..53b0fe8dc8ef9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,10 +25,13 @@ public final class GreaterThanKeywordsEvaluator implements EvalOperator.Expressi private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public GreaterThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + 
this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java index b776ee8bbe65f..595f8f9fb1172 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class GreaterThanLongsEvaluator implements EvalOperator.ExpressionE private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public GreaterThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java index 0febed41220b7..dd5eb8d86467a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class GreaterThanOrEqualDoublesEvaluator implements EvalOperator.Ex private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public GreaterThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java index 9d74aa46b15d7..05d173d4e99b1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class GreaterThanOrEqualIntsEvaluator implements EvalOperator.Expre private final 
EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public GreaterThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java index d3a38217a0323..d4bc72d576c7b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,10 +25,13 @@ public final class GreaterThanOrEqualKeywordsEvaluator implements EvalOperator.E private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public GreaterThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java index 287fbf734bdee..5f7a4ad79217f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class GreaterThanOrEqualLongsEvaluator implements EvalOperator.Expr private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public GreaterThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java index 016f17241cfe0..065486d6a38fa 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import 
org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class LessThanDoublesEvaluator implements EvalOperator.ExpressionEv private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public LessThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java index ec4f304a886ae..a47a4cc72fafb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class LessThanIntsEvaluator implements EvalOperator.ExpressionEvalu private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public LessThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + 
this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java index a0ae3738eff17..d1d024075feef 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,10 +25,13 @@ public final class LessThanKeywordsEvaluator implements EvalOperator.ExpressionE private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public LessThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java index 1174bd148643b..2aa35ea470649 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class LessThanLongsEvaluator implements EvalOperator.ExpressionEval private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public LessThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java index bba1892aae9be..21a4bb97f42ea 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class LessThanOrEqualDoublesEvaluator implements EvalOperator.Expre private final EvalOperator.ExpressionEvaluator rhs; + private 
final DriverContext driverContext; + public LessThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java index 474743dcb89a5..db6130e498e2a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class LessThanOrEqualIntsEvaluator implements EvalOperator.Expressi private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public LessThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java index 61d5fecaeb348..46849d4b450f1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,10 +25,13 @@ public final class LessThanOrEqualKeywordsEvaluator implements EvalOperator.Expr private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public LessThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java index f96f858f631ba..4e05484d18f30 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import 
org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class LessThanOrEqualLongsEvaluator implements EvalOperator.Express private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public LessThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java index 811e7eaafc384..a68b972ed0c52 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,10 +22,13 @@ public final class NotEqualsBoolsEvaluator implements EvalOperator.ExpressionEva private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public NotEqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs 
= rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java index 849cd2a456de6..fee6915f54e05 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class NotEqualsDoublesEvaluator implements EvalOperator.ExpressionE private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public NotEqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java index c34ccdc3d9fc4..e488cd0d16c23 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class NotEqualsIntsEvaluator implements EvalOperator.ExpressionEval private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public NotEqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index 880dd4f9d2d61..0f6a113062826 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,10 +25,13 @@ public final class NotEqualsKeywordsEvaluator implements EvalOperator.Expression private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext 
driverContext; + public NotEqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java index b06b8e3523a4d..21384ead1e745 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class NotEqualsLongsEvaluator implements EvalOperator.ExpressionEva private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public NotEqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java index c4ee4b44ca14c..98384f9cf0203 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class NotEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator v; - public NotEvaluator(EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public NotEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { this.v = v; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java index bea4a65bc6acc..001b0e1702f61 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -25,10 +26,13 @@ public final class RegexMatchEvaluator implements EvalOperator.ExpressionEvaluat private final CharacterRunAutomaton pattern; - public 
RegexMatchEvaluator(EvalOperator.ExpressionEvaluator input, - CharacterRunAutomaton pattern) { + private final DriverContext driverContext; + + public RegexMatchEvaluator(EvalOperator.ExpressionEvaluator input, CharacterRunAutomaton pattern, + DriverContext driverContext) { this.input = input; this.pattern = pattern; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index e5e63c5a22a03..463c10a14ee5e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,12 @@ public final class GreatestBooleanEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public GreatestBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private final DriverContext driverContext; + + public GreatestBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java index a6ea67a79f4b1..f6d6b62b5d3bd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,12 @@ public final class GreatestBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public GreatestBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private final DriverContext driverContext; + + public GreatestBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index 78701ba2a417f..22ae82dc0d0ac 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; 
import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,12 @@ public final class GreatestDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public GreatestDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private final DriverContext driverContext; + + public GreatestDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index 9f1e98b358c84..c2cc45bc8b180 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,12 @@ public final class GreatestIntEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public GreatestIntEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private final DriverContext driverContext; + + public GreatestIntEvaluator(EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index 5f3a8bdbf5dab..e148c55b36d61 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,12 @@ public final class GreatestLongEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public GreatestLongEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private final DriverContext driverContext; + + public GreatestLongEvaluator(EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java index ebf3028d80869..8aa566235c035 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -11,6 +11,7 @@ import 
org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,12 @@ public final class LeastBooleanEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public LeastBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private final DriverContext driverContext; + + public LeastBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java index 46fcbbca1326d..c4dd29e583169 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,12 @@ public final class LeastBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public LeastBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private final DriverContext driverContext; + + public LeastBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values, 
+ DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java index c046cc2723e7e..43ada40582c31 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,12 @@ public final class LeastDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public LeastDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private final DriverContext driverContext; + + public LeastDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java index 0c1d30eeee3b7..15396c59dfa2c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,11 @@ public final class LeastIntEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public LeastIntEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private final DriverContext driverContext; + + public LeastIntEvaluator(EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java index 314ea40744030..2115d9c19b7fb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,12 @@ public final class LeastLongEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public LeastLongEvaluator(EvalOperator.ExpressionEvaluator[] values) { + private 
final DriverContext driverContext; + + public LeastLongEvaluator(EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java index 920581cdeaf80..bae85f15dc525 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -25,11 +26,14 @@ public final class DateExtractConstantEvaluator implements EvalOperator.Expressi private final ZoneId zone; + private final DriverContext driverContext; + public DateExtractConstantEvaluator(EvalOperator.ExpressionEvaluator value, - ChronoField chronoField, ZoneId zone) { + ChronoField chronoField, ZoneId zone, DriverContext driverContext) { this.value = value; this.chronoField = chronoField; this.zone = zone; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java index f4893d59a9f2d..0704a84e9d39e 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -32,12 +33,15 @@ public final class DateExtractEvaluator implements EvalOperator.ExpressionEvalua private final ZoneId zone; + private final DriverContext driverContext; + public DateExtractEvaluator(Source source, EvalOperator.ExpressionEvaluator value, - EvalOperator.ExpressionEvaluator chronoField, ZoneId zone) { + EvalOperator.ExpressionEvaluator chronoField, ZoneId zone, DriverContext driverContext) { this.warnings = new Warnings(source); this.value = value; this.chronoField = chronoField; this.zone = zone; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java index dc2c041532bb8..fdfc28b0af3b9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import 
org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,10 +25,13 @@ public final class DateFormatConstantEvaluator implements EvalOperator.Expressio private final DateFormatter formatter; - public DateFormatConstantEvaluator(EvalOperator.ExpressionEvaluator val, - DateFormatter formatter) { + private final DriverContext driverContext; + + public DateFormatConstantEvaluator(EvalOperator.ExpressionEvaluator val, DateFormatter formatter, + DriverContext driverContext) { this.val = val; this.formatter = formatter; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java index 847cbc011f8ad..7fbb705c95335 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -27,11 +28,14 @@ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluat private final Locale locale; + private final DriverContext driverContext; + public DateFormatEvaluator(EvalOperator.ExpressionEvaluator val, - EvalOperator.ExpressionEvaluator formatter, Locale locale) { + EvalOperator.ExpressionEvaluator formatter, Locale locale, DriverContext driverContext) { this.val = val; this.formatter = formatter; this.locale = locale; 
+ this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java index 7130476b96e23..8a714985c666d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -29,11 +30,14 @@ public final class DateParseConstantEvaluator implements EvalOperator.Expression private final DateFormatter formatter; + private final DriverContext driverContext; + public DateParseConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator val, - DateFormatter formatter) { + DateFormatter formatter, DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java index a4f79021c1e70..cc0cfc62a1921 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -31,12 +32,15 @@ public final class DateParseEvaluator implements EvalOperator.ExpressionEvaluato private final ZoneId zoneId; + private final DriverContext driverContext; + public DateParseEvaluator(Source source, EvalOperator.ExpressionEvaluator val, - EvalOperator.ExpressionEvaluator formatter, ZoneId zoneId) { + EvalOperator.ExpressionEvaluator formatter, ZoneId zoneId, DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; this.zoneId = zoneId; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java index 42d9fc3250919..79c36712313a0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -22,9 +23,13 @@ public final class 
DateTruncEvaluator implements EvalOperator.ExpressionEvaluato private final Rounding.Prepared rounding; - public DateTruncEvaluator(EvalOperator.ExpressionEvaluator fieldVal, Rounding.Prepared rounding) { + private final DriverContext driverContext; + + public DateTruncEvaluator(EvalOperator.ExpressionEvaluator fieldVal, Rounding.Prepared rounding, + DriverContext driverContext) { this.fieldVal = fieldVal; this.rounding = rounding; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java index 290662cfc33ba..49cf0727b4781 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -18,8 +19,11 @@ public final class NowEvaluator implements EvalOperator.ExpressionEvaluator { private final long now; - public NowEvaluator(long now) { + private final DriverContext driverContext; + + public NowEvaluator(long now, DriverContext driverContext) { this.now = now; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java index d87502789de97..6241093a607c8 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -25,10 +26,13 @@ public final class CIDRMatchEvaluator implements EvalOperator.ExpressionEvaluato private final EvalOperator.ExpressionEvaluator[] cidrs; + private final DriverContext driverContext; + public CIDRMatchEvaluator(EvalOperator.ExpressionEvaluator ip, - EvalOperator.ExpressionEvaluator[] cidrs) { + EvalOperator.ExpressionEvaluator[] cidrs, DriverContext driverContext) { this.ip = ip; this.cidrs = cidrs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java index 8250081a5ddd8..f9e8f72d981c2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,12 @@ public final class AbsDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final 
EvalOperator.ExpressionEvaluator fieldVal; - public AbsDoubleEvaluator(EvalOperator.ExpressionEvaluator fieldVal) { + private final DriverContext driverContext; + + public AbsDoubleEvaluator(EvalOperator.ExpressionEvaluator fieldVal, + DriverContext driverContext) { this.fieldVal = fieldVal; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java index 1282d3f7401d6..e974bd69063e5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class AbsIntEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator fieldVal; - public AbsIntEvaluator(EvalOperator.ExpressionEvaluator fieldVal) { + private final DriverContext driverContext; + + public AbsIntEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { this.fieldVal = fieldVal; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java index 3d87f8007d4ba..f2e01666c25e6 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class AbsLongEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator fieldVal; - public AbsLongEvaluator(EvalOperator.ExpressionEvaluator fieldVal) { + private final DriverContext driverContext; + + public AbsLongEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { this.fieldVal = fieldVal; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java index 3d95122009b7d..c966c0599bb51 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,9 +25,13 @@ public final class AcosEvaluator 
implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public AcosEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public AcosEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java index 61cced3385905..159734187f8e5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,9 +25,13 @@ public final class AsinEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public AsinEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public AsinEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java index 4ea7cb15c625c..3a3bcf2594495 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,9 +22,13 @@ public final class Atan2Evaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator x; - public Atan2Evaluator(EvalOperator.ExpressionEvaluator y, EvalOperator.ExpressionEvaluator x) { + private final DriverContext driverContext; + + public Atan2Evaluator(EvalOperator.ExpressionEvaluator y, EvalOperator.ExpressionEvaluator x, + DriverContext driverContext) { this.y = y; this.x = x; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java index fac99b790d262..6896e65441c86 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import 
org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class AtanEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public AtanEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public AtanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java index 95105fce34831..61709f909b850 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,11 @@ public final class CastIntToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator v; - public CastIntToDoubleEvaluator(EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public CastIntToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { this.v = v; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java index 76f4bc3a89cb3..83628ea5962d7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,11 @@ public final class CastIntToLongEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator v; - public CastIntToLongEvaluator(EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public CastIntToLongEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { this.v = v; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java index 7b8bfc2b8d199..4e52416b98654 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; 
import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,12 @@ public final class CastIntToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator v; - public CastIntToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public CastIntToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { this.v = v; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java index 22d3f4f5b8c48..f3a5501edf781 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,12 @@ public final class CastLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator v; - public CastLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public CastLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { this.v = v; + this.driverContext = driverContext; } 
@Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java index de215d3e22373..23179757e7532 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,12 @@ public final class CastLongToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator v; - public CastLongToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public CastLongToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { this.v = v; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java index d1e009c2a0b2e..0f149bf7ae340 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,12 @@ public final class CastUnsignedLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator v; - public CastUnsignedLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public CastUnsignedLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { this.v = v; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java index a951cb8c30b0b..79f8143564a42 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class CeilDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public CeilDoubleEvaluator(EvalOperator.ExpressionEvaluator val) { + private 
final DriverContext driverContext; + + public CeilDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java index 226bca608e01e..bcf054989d31c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class CosEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public CosEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public CosEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java index 2ff9dc6e8ef4d..73819619937ed 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,9 +25,13 @@ public final class CoshEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public CoshEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public CoshEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java index d7b5a1263e85d..75db4ac7b2b38 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class FloorDoubleEvaluator implements EvalOperator.ExpressionEvaluator { private final 
EvalOperator.ExpressionEvaluator val; - public FloorDoubleEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public FloorDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java index 233c95aea3cfd..f17b7dfe91c21 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,11 @@ public final class IsFiniteEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public IsFiniteEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public IsFiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java index b53623bc48514..37b1f58efe49f 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,11 @@ public final class IsInfiniteEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public IsInfiniteEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public IsInfiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java index c947eb5126c45..1fff9b81f08e9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,11 @@ public final class IsNaNEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public 
IsNaNEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public IsNaNEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java index 15eaa7388944f..03175592f1df3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,9 +25,13 @@ public final class Log10DoubleEvaluator implements EvalOperator.ExpressionEvalua private final EvalOperator.ExpressionEvaluator val; - public Log10DoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public Log10DoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java index c3c95930836cd..11ce92c7f17ff 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,9 +26,13 @@ public final class Log10IntEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator val; - public Log10IntEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public Log10IntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java index e8c1e590b9084..658f0575e9e94 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import 
org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,9 +26,13 @@ public final class Log10LongEvaluator implements EvalOperator.ExpressionEvaluato private final EvalOperator.ExpressionEvaluator val; - public Log10LongEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public Log10LongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java index faa75ee571eb9..e179ac4cb8f27 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,9 +26,13 @@ public final class Log10UnsignedLongEvaluator implements EvalOperator.Expression private final EvalOperator.ExpressionEvaluator val; - public 
Log10UnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public Log10UnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java index 0ecd7f5455942..6ff5f5f0d510e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class PowDoubleEvaluator implements EvalOperator.ExpressionEvaluato private final EvalOperator.ExpressionEvaluator exponent; + private final DriverContext driverContext; + public PowDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator base, - EvalOperator.ExpressionEvaluator exponent) { + EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { this.warnings = new Warnings(source); this.base = base; this.exponent = exponent; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java index 65a20463c26e1..70a663d2b933e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -27,11 +28,14 @@ public final class PowIntEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator exponent; + private final DriverContext driverContext; + public PowIntEvaluator(Source source, EvalOperator.ExpressionEvaluator base, - EvalOperator.ExpressionEvaluator exponent) { + EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { this.warnings = new Warnings(source); this.base = base; this.exponent = exponent; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java index 99ee34f7ee770..870f0b75dedca 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -27,11 +28,14 @@ public final class PowLongEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator exponent; + private final DriverContext driverContext; + public PowLongEvaluator(Source source, EvalOperator.ExpressionEvaluator base, - EvalOperator.ExpressionEvaluator exponent) { + EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { this.warnings = new Warnings(source); this.base = base; this.exponent = exponent; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java index 52a51ba610d38..4d4a3deb2cc7a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class RoundDoubleEvaluator implements 
EvalOperator.ExpressionEvalua private final EvalOperator.ExpressionEvaluator decimals; + private final DriverContext driverContext; + public RoundDoubleEvaluator(EvalOperator.ExpressionEvaluator val, - EvalOperator.ExpressionEvaluator decimals) { + EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { this.val = val; this.decimals = decimals; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java index 671aaf5f3d029..eeb5eedf25f0d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,12 @@ public final class RoundDoubleNoDecimalsEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public RoundDoubleNoDecimalsEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public RoundDoubleNoDecimalsEvaluator(EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java index f178a571b7e9d..e73bbc05f72c2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -23,10 +24,13 @@ public final class RoundIntEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator decimals; + private final DriverContext driverContext; + public RoundIntEvaluator(EvalOperator.ExpressionEvaluator val, - EvalOperator.ExpressionEvaluator decimals) { + EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { this.val = val; this.decimals = decimals; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java index 12f193fe216f3..e2bfc94b4205f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; 
import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,10 +22,13 @@ public final class RoundLongEvaluator implements EvalOperator.ExpressionEvaluato private final EvalOperator.ExpressionEvaluator decimals; + private final DriverContext driverContext; + public RoundLongEvaluator(EvalOperator.ExpressionEvaluator val, - EvalOperator.ExpressionEvaluator decimals) { + EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { this.val = val; this.decimals = decimals; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java index 1a8247ba34c80..15bbc619a66d6 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,10 +22,13 @@ public final class RoundUnsignedLongEvaluator implements EvalOperator.Expression private final EvalOperator.ExpressionEvaluator decimals; + private final DriverContext driverContext; + public RoundUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, - EvalOperator.ExpressionEvaluator decimals) { + EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { this.val = val; this.decimals = decimals; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java index aa1649021be09..a7483e84ee730 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class SinEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public SinEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public SinEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java index 43ec78d3289f8..ba985f76fac20 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,9 +25,13 @@ public final class SinhEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public SinhEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public SinhEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java index c60176d3e7135..39d15c6e143cc 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,9 +25,13 @@ public final class SqrtDoubleEvaluator implements EvalOperator.ExpressionEvaluat private final EvalOperator.ExpressionEvaluator val; - public SqrtDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public 
SqrtDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java index 1241e35e8f5db..6fc49da574015 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,9 +26,13 @@ public final class SqrtIntEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator val; - public SqrtIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public SqrtIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java index 8dc27fada343a..8506b02f1aa27 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -25,9 +26,13 @@ public final class SqrtLongEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator val; - public SqrtLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public SqrtLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.warnings = new Warnings(source); this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java index c72a767e90a13..1b01e45679ad4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,8 +22,12 @@ public final class SqrtUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public SqrtUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public SqrtUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java index 0c8de1fe98abc..387a4b6148e1e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class TanEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public TanEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public TanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java index 207ae56fb227d..451a85aa71c22 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class TanhEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public TanhEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public TanhEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java index 775723ee66c0b..695844228c9ea 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -18,8 +19,11 @@ * This class is generated. Do not edit it. */ public final class MvAvgDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvAvgDoubleEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvAvgDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java index 655d9fdbe97a4..e85741f4ec501 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -19,8 +20,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvAvgIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvAvgIntEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvAvgIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java index 95dee758eaa32..b96096b8fe22e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -19,8 +20,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvAvgLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvAvgLongEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvAvgLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java index 97845cd82e105..b0e0864c7017c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -19,8 +20,12 @@ * This class is generated. Do not edit it. 
*/ public final class MvAvgUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvAvgUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvAvgUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java index a2cf3af0bd9e2..9ab7e6919d68e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,12 @@ * This class is generated. Do not edit it. 
*/ public final class MvMaxBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMaxBooleanEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMaxBooleanEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java index b96d9830b9cdc..5101d380c447e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,12 @@ * This class is generated. Do not edit it. 
*/ public final class MvMaxBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMaxBytesRefEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMaxBytesRefEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java index 0465808883020..4898a2ddac0ab 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvMaxDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMaxDoubleEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMaxDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java index e166fa38a1eae..fdeb4c4c5a469 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvMaxIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMaxIntEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMaxIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java index fe72bdd726c20..99e79f59160a6 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvMaxLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMaxLongEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMaxLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java index 266b2b8e3d4f9..ab2422df3b7b1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,12 @@ * This class is generated. Do not edit it. 
*/ public final class MvMedianDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMedianDoubleEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMedianDoubleEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java index d20f953a6d34c..f66e1b65ed131 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvMedianIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMedianIntEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMedianIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java index 710d79e8aa6f6..7ced56110af42 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,12 @@ * This class is generated. Do not edit it. 
*/ public final class MvMedianLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMedianLongEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMedianLongEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java index cfefbb492d53e..162608e055374 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,12 @@ * This class is generated. Do not edit it. 
*/ public final class MvMedianUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMedianUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMedianUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java index 5e3697243d9cb..3a67c042d82f0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.BooleanArrayVector; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,12 @@ * This class is generated. Do not edit it. 
*/ public final class MvMinBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMinBooleanEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMinBooleanEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java index 74173a3d18e5b..538cf9dbed307 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefArrayVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,12 @@ * This class is generated. Do not edit it. 
*/ public final class MvMinBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMinBytesRefEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMinBytesRefEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java index 5fd2d66a2afce..905c6dc87eaf1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvMinDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMinDoubleEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMinDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java index 37d3b5c98778b..f71ea2a663314 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvMinIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMinIntEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMinIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java index f0f0734e8d176..da44e992c266f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -17,8 +18,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvMinLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvMinLongEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvMinLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java index e945863404fa7..4b7903ae2b4c2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -18,8 +19,11 @@ * This class is generated. Do not edit it. 
*/ public final class MvSumDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { - public MvSumDoubleEvaluator(EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvSumDoubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(field); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java index e1217cae07ec3..da7abdfd1efd4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,9 +21,13 @@ public final class MvSumIntEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator { private final Warnings warnings; - public MvSumIntEvaluator(Source source, EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvSumIntEvaluator(Source source, EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); this.warnings = new Warnings(source); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java index 4f5c301448b43..f669e88e1aeba 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java @@ -9,6 +9,7 @@ import java.lang.String; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,9 +21,13 @@ public final class MvSumLongEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator { private final Warnings warnings; - public MvSumLongEvaluator(Source source, EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvSumLongEvaluator(Source source, EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); this.warnings = new Warnings(source); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java index 6e78f1e851921..18773ea49f245 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java @@ -9,6 +9,7 @@ import 
java.lang.String; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,9 +21,13 @@ public final class MvSumUnsignedLongEvaluator extends AbstractMultivalueFunction.AbstractNullableEvaluator { private final Warnings warnings; - public MvSumUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator field) { + private final DriverContext driverContext; + + public MvSumUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { super(field); this.warnings = new Warnings(source); + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index 0d0d9dd23091e..868e6338536e8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,9 +25,13 @@ public final class ConcatEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator[] values; - public ConcatEvaluator(BytesRefBuilder scratch, EvalOperator.ExpressionEvaluator[] values) { + private final 
DriverContext driverContext; + + public ConcatEvaluator(BytesRefBuilder scratch, EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { this.scratch = scratch; this.values = values; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java index 19bb7f4cb4e6a..594f521c76da5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,11 @@ public final class LTrimEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public LTrimEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public LTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index 23ccb4d544331..05da38fe7d09d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -29,12 +30,16 @@ public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator length; + private final DriverContext driverContext; + public LeftEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, - EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length) { + EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length, + DriverContext driverContext) { this.out = out; this.cp = cp; this.str = str; this.length = length; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index c32ebf511dc99..38d9adaf1184c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -22,8 +23,11 @@ public final class LengthEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public 
LengthEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public LengthEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java index 946542427c53b..f1a07fbcbd7f4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,11 @@ public final class RTrimEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public RTrimEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public RTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java index f075c37d05dd9..35d05b53faf04 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -29,12 +30,16 @@ public final class RightEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator length; + private final DriverContext driverContext; + public RightEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, - EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length) { + EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length, + DriverContext driverContext) { this.out = out; this.cp = cp; this.str = str; this.length = length; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java index 5f721c3d8ad88..29ee86b1aeca2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,11 +25,14 @@ public final class SplitSingleByteEvaluator implements EvalOperator.ExpressionEv private 
final BytesRef scratch; + private final DriverContext driverContext; + public SplitSingleByteEvaluator(EvalOperator.ExpressionEvaluator str, byte delim, - BytesRef scratch) { + BytesRef scratch, DriverContext driverContext) { this.str = str; this.delim = delim; this.scratch = scratch; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java index 090f580b8ce06..8577aa7cc3fc7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,11 +25,14 @@ public final class SplitVariableEvaluator implements EvalOperator.ExpressionEval private final BytesRef scratch; + private final DriverContext driverContext; + public SplitVariableEvaluator(EvalOperator.ExpressionEvaluator str, - EvalOperator.ExpressionEvaluator delim, BytesRef scratch) { + EvalOperator.ExpressionEvaluator delim, BytesRef scratch, DriverContext driverContext) { this.str = str; this.delim = delim; this.scratch = scratch; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java index 2fe2c13ca0659..1ec6f97836025 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,10 +25,13 @@ public final class StartsWithEvaluator implements EvalOperator.ExpressionEvaluat private final EvalOperator.ExpressionEvaluator prefix; + private final DriverContext driverContext; + public StartsWithEvaluator(EvalOperator.ExpressionEvaluator str, - EvalOperator.ExpressionEvaluator prefix) { + EvalOperator.ExpressionEvaluator prefix, DriverContext driverContext) { this.str = str; this.prefix = prefix; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java index 75a35bee93db8..3c86a51c513dc 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import 
org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -26,11 +27,15 @@ public final class SubstringEvaluator implements EvalOperator.ExpressionEvaluato private final EvalOperator.ExpressionEvaluator length; + private final DriverContext driverContext; + public SubstringEvaluator(EvalOperator.ExpressionEvaluator str, - EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator length) { + EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator length, + DriverContext driverContext) { this.str = str; this.start = start; this.length = length; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java index 71fb35a06dfa3..cbe96b7056a75 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -24,10 +25,13 @@ public final class SubstringNoLengthEvaluator implements EvalOperator.Expression private final EvalOperator.ExpressionEvaluator start; + private final DriverContext driverContext; + public SubstringNoLengthEvaluator(EvalOperator.ExpressionEvaluator str, - EvalOperator.ExpressionEvaluator start) { + EvalOperator.ExpressionEvaluator start, DriverContext driverContext) { this.str = str; 
this.start = start; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java index d276116afbf55..dceffc22cec7c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -20,8 +21,11 @@ public final class TrimEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator val; - public TrimEvaluator(EvalOperator.ExpressionEvaluator val) { + private final DriverContext driverContext; + + public TrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.val = val; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java index 84d8809454f8d..5562085ba01fd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java @@ -13,6 +13,7 @@ import 
org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -28,11 +29,14 @@ public final class AddDatetimesEvaluator implements EvalOperator.ExpressionEvalu private final TemporalAmount temporalAmount; + private final DriverContext driverContext; + public AddDatetimesEvaluator(Source source, EvalOperator.ExpressionEvaluator datetime, - TemporalAmount temporalAmount) { + TemporalAmount temporalAmount, DriverContext driverContext) { this.warnings = new Warnings(source); this.datetime = datetime; this.temporalAmount = temporalAmount; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index af04a4c68e021..86a4f83045725 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,10 +22,13 @@ public final class AddDoublesEvaluator implements EvalOperator.ExpressionEvaluat private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + 
public AddDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java index 35d80b4604c74..1c9e2227af750 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class AddIntsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public AddIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java index 0a66f66d8da7e..80d401d42bf02 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class AddLongsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public AddLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java index 1bf3b8fa3ddb7..e48572f7980d1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class AddUnsignedLongsEvaluator implements EvalOperator.ExpressionE private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public AddUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java index 4b13bc1c5c072..e4ce48a844de3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,10 +22,13 @@ public final class 
DivDoublesEvaluator implements EvalOperator.ExpressionEvaluat private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public DivDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java index 5cdc73fbd99bb..8b905ca7a0a4f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class DivIntsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public DivIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java index 7a7311152f924..155beeff58f45 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class DivLongsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public DivLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java index de0ce3aafb46f..f5c949351bb0f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class DivUnsignedLongsEvaluator implements EvalOperator.ExpressionE private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public DivUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java index 0698e816d8a86..7abb9cdca124d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,10 +22,13 @@ public final class 
ModDoublesEvaluator implements EvalOperator.ExpressionEvaluat private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public ModDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java index c1af534a07da4..4ba618a162e60 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class ModIntsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public ModIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java index 295724e041211..8fb43d2968947 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class ModLongsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public ModLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java index 95621fb9ef61b..664d3219e0319 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class ModUnsignedLongsEvaluator implements EvalOperator.ExpressionE private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public ModUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java index 0a0fbebbe18d6..d5079ce565c14 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,10 +22,13 @@ public final class 
MulDoublesEvaluator implements EvalOperator.ExpressionEvaluat private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public MulDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java index e94d174f8249c..e5b9917c409e8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class MulIntsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public MulIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java index ffa437390fead..002793ec781d9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class MulLongsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public MulLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java index c9d1b95851ed0..fe0d1ae2f6e7b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class MulUnsignedLongsEvaluator implements EvalOperator.ExpressionE private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public MulUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java index 3a54d490bb003..e5d1284ade262 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -19,8 +20,11 @@ public final class 
NegDoublesEvaluator implements EvalOperator.ExpressionEvaluator { private final EvalOperator.ExpressionEvaluator v; - public NegDoublesEvaluator(EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public NegDoublesEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { this.v = v; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java index aefa05097cb96..526ce41bd71d6 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,9 +25,13 @@ public final class NegIntsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator v; - public NegIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public NegIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { this.warnings = new Warnings(source); this.v = v; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java index 5bd301b8f76fc..21b02d8f0783a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,9 +25,13 @@ public final class NegLongsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator v; - public NegLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator v) { + private final DriverContext driverContext; + + public NegLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { this.warnings = new Warnings(source); this.v = v; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java index d6e94ce2218ed..681edc68f326e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -28,11 +29,14 @@ public final class SubDatetimesEvaluator implements EvalOperator.ExpressionEvalu private final TemporalAmount temporalAmount; + private final DriverContext driverContext; + public SubDatetimesEvaluator(Source source, EvalOperator.ExpressionEvaluator datetime, - TemporalAmount temporalAmount) { + TemporalAmount temporalAmount, DriverContext driverContext) { this.warnings = new Warnings(source); this.datetime = datetime; this.temporalAmount = temporalAmount; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java index c245ad03a0cea..bf69f6a7a9a81 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; /** @@ -21,10 +22,13 @@ public final 
class SubDoublesEvaluator implements EvalOperator.ExpressionEvaluat private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public SubDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java index bc942ca7522fb..8d5afb8113a99 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class SubIntsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public SubIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java index 4b22842c74d8a..5f561c9778b18 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class SubLongsEvaluator implements EvalOperator.ExpressionEvaluator private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public SubLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java index 76ae796c5205a..a30d9d973c2d8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,11 +27,14 @@ public final class SubUnsignedLongsEvaluator implements EvalOperator.ExpressionE private final EvalOperator.ExpressionEvaluator rhs; + private final DriverContext driverContext; + public SubUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, - EvalOperator.ExpressionEvaluator rhs) { + EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; + this.driverContext = driverContext; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index 2f8f09b9ff02b..3688a0633aca9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -35,7 +35,6 @@ import java.util.List; import java.util.function.IntFunction; -import java.util.function.Supplier; public final class EvalMapper { @@ -59,7 +58,7 @@ public final class EvalMapper { private EvalMapper() {} @SuppressWarnings({ "rawtypes", "unchecked" }) - public static Supplier toEvaluator(Expression exp, Layout layout) { + public static ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { if (exp instanceof EvaluatorMapper m) { return m.toEvaluator(e -> toEvaluator(e, layout)); } @@ 
-73,9 +72,9 @@ public static Supplier toEvaluator(Expression exp, Layout l static class BooleanLogic extends ExpressionMapper { @Override - public Supplier map(BinaryLogic bc, Layout layout) { - Supplier leftEval = toEvaluator(bc.left(), layout); - Supplier rightEval = toEvaluator(bc.right(), layout); + public ExpressionEvaluator.Factory map(BinaryLogic bc, Layout layout) { + var leftEval = toEvaluator(bc.left(), layout); + var rightEval = toEvaluator(bc.right(), layout); /** * Evaluator for the three-valued boolean expressions. * We can't generate these with the {@link Evaluator} annotation because that @@ -140,21 +139,24 @@ private Block eval(BooleanVector lhs, BooleanVector rhs) { } } - return () -> new BooleanLogicExpressionEvaluator(bc, leftEval.get(), rightEval.get()); + return driverContext -> new BooleanLogicExpressionEvaluator(bc, leftEval.get(driverContext), rightEval.get(driverContext)); } } static class Nots extends ExpressionMapper { @Override - public Supplier map(Not not, Layout layout) { - Supplier expEval = toEvaluator(not.field(), layout); - return () -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.logical.NotEvaluator(expEval.get()); + public ExpressionEvaluator.Factory map(Not not, Layout layout) { + var expEval = toEvaluator(not.field(), layout); + return dvrCtx -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.logical.NotEvaluator( + expEval.get(dvrCtx), + dvrCtx + ); } } static class Attributes extends ExpressionMapper { @Override - public Supplier map(Attribute attr, Layout layout) { + public ExpressionEvaluator.Factory map(Attribute attr, Layout layout) { record Attribute(int channel) implements ExpressionEvaluator { @Override public Block eval(Page page) { @@ -162,14 +164,14 @@ public Block eval(Page page) { } } int channel = layout.get(attr.id()).channel(); - return () -> new Attribute(channel); + return driverContext -> new Attribute(channel); } } static class Literals extends ExpressionMapper { @Override 
- public Supplier map(Literal lit, Layout layout) { + public ExpressionEvaluator.Factory map(Literal lit, Layout layout) { record LiteralsEvaluator(IntFunction block) implements ExpressionEvaluator { @Override public Block eval(Page page) { @@ -188,7 +190,7 @@ public String toString() { return lit.toString(); } }; - return () -> new LiteralsEvaluator(blockClosure); + return driverContext -> new LiteralsEvaluator(blockClosure); } private IntFunction block(Literal lit) { @@ -214,9 +216,9 @@ private IntFunction block(Literal lit) { static class IsNulls extends ExpressionMapper { @Override - public Supplier map(IsNull isNull, Layout layout) { - Supplier field = toEvaluator(isNull.field(), layout); - return () -> new IsNullEvaluator(field.get()); + public ExpressionEvaluator.Factory map(IsNull isNull, Layout layout) { + var field = toEvaluator(isNull.field(), layout); + return driverContext -> new IsNullEvaluator(field.get(driverContext)); } record IsNullEvaluator(EvalOperator.ExpressionEvaluator field) implements EvalOperator.ExpressionEvaluator { @@ -238,9 +240,9 @@ public Block eval(Page page) { static class IsNotNulls extends ExpressionMapper { @Override - public Supplier map(IsNotNull isNotNull, Layout layout) { - Supplier field = toEvaluator(isNotNull.field(), layout); - return () -> new IsNotNullEvaluator(field.get()); + public ExpressionEvaluator.Factory map(IsNotNull isNotNull, Layout layout) { + var field = toEvaluator(isNotNull.field(), layout); + return driverContext -> new IsNotNullEvaluator(field.get(driverContext)); } record IsNotNullEvaluator(EvalOperator.ExpressionEvaluator field) implements EvalOperator.ExpressionEvaluator { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java index d15aa0e28871b..aa4d9235bdb40 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java @@ -8,11 +8,11 @@ package org.elasticsearch.xpack.esql.evaluator.mapper; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.ql.expression.Expression; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.compute.data.BlockUtils.fromArrayRow; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; @@ -21,7 +21,7 @@ * Expressions that have a mapping to an {@link ExpressionEvaluator}. */ public interface EvaluatorMapper { - Supplier toEvaluator(Function> toEvaluator); + ExpressionEvaluator.Factory toEvaluator(Function toEvaluator); /** * Fold using {@link #toEvaluator} so you don't need a "by hand" @@ -30,6 +30,9 @@ public interface EvaluatorMapper { * good enough. 
*/ default Object fold() { - return toJavaObject(toEvaluator(e -> () -> p -> fromArrayRow(e.fold())[0]).get().eval(new Page(1)), 0); + return toJavaObject( + toEvaluator(e -> driverContext -> p -> fromArrayRow(e.fold())[0]).get(DriverContext.DEFAULT).eval(new Page(1)), + 0 + ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java index b574bb08eeea5..9657fd0c6ffc0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/ExpressionMapper.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.esql.evaluator.mapper; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.util.ReflectionUtils; -import java.util.function.Supplier; - public abstract class ExpressionMapper { public final Class typeToken; @@ -21,5 +19,5 @@ public ExpressionMapper() { typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass()); } - public abstract Supplier map(E expression, Layout layout); + public abstract ExpressionEvaluator.Factory map(E expression, Layout layout); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java index 592a9f3d99499..cb10499ae6d0b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; import org.elasticsearch.common.TriFunction; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; @@ -20,9 +22,6 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import java.util.function.BiFunction; -import java.util.function.Supplier; - import static org.elasticsearch.xpack.esql.evaluator.EvalMapper.toEvaluator; public abstract class ComparisonMapper extends ExpressionMapper { @@ -76,18 +75,18 @@ public abstract class ComparisonMapper extends Expre ) { }; - private final BiFunction ints; - private final BiFunction longs; - private final BiFunction doubles; - private final BiFunction keywords; - private final BiFunction bools; + private final TriFunction ints; + private final TriFunction longs; + private final TriFunction doubles; + private final TriFunction keywords; + private final TriFunction bools; private ComparisonMapper( - BiFunction ints, - BiFunction longs, - BiFunction doubles, - BiFunction keywords, - BiFunction bools + TriFunction ints, + TriFunction longs, + TriFunction doubles, + TriFunction keywords, + TriFunction bools ) { this.ints = ints; this.longs = longs; @@ -97,20 +96,20 @@ private ComparisonMapper( } ComparisonMapper( - BiFunction ints, - BiFunction longs, - BiFunction doubles, - BiFunction keywords + TriFunction ints, + TriFunction longs, + TriFunction doubles, + TriFunction keywords ) { this.ints = ints; this.longs = longs; 
this.doubles = doubles; this.keywords = keywords; - this.bools = (lhs, rhs) -> { throw EsqlIllegalArgumentException.illegalDataType(DataTypes.BOOLEAN); }; + this.bools = (lhs, rhs, dvrCtx) -> { throw EsqlIllegalArgumentException.illegalDataType(DataTypes.BOOLEAN); }; } @Override - public final Supplier map(BinaryComparison bc, Layout layout) { + public final ExpressionEvaluator.Factory map(BinaryComparison bc, Layout layout) { DataType leftType = bc.left().dataType(); if (leftType.isNumeric()) { DataType type = EsqlDataTypeRegistry.INSTANCE.commonType(leftType, bc.right().dataType()); @@ -128,32 +127,32 @@ public final Supplier map(BinaryComparison bc, return castToEvaluator(bc, layout, DataTypes.UNSIGNED_LONG, longs); } } - Supplier leftEval = toEvaluator(bc.left(), layout); - Supplier rightEval = toEvaluator(bc.right(), layout); + var leftEval = toEvaluator(bc.left(), layout); + var rightEval = toEvaluator(bc.right(), layout); if (leftType == DataTypes.KEYWORD || leftType == DataTypes.TEXT || leftType == DataTypes.IP || leftType == DataTypes.VERSION) { - return () -> keywords.apply(leftEval.get(), rightEval.get()); + return dvrCtx -> keywords.apply(leftEval.get(dvrCtx), rightEval.get(dvrCtx), dvrCtx); } if (leftType == DataTypes.BOOLEAN) { - return () -> bools.apply(leftEval.get(), rightEval.get()); + return dvrCtx -> bools.apply(leftEval.get(dvrCtx), rightEval.get(dvrCtx), dvrCtx); } if (leftType == DataTypes.DATETIME) { - return () -> longs.apply(leftEval.get(), rightEval.get()); + return dvrCtx -> longs.apply(leftEval.get(dvrCtx), rightEval.get(dvrCtx), dvrCtx); } throw new EsqlIllegalArgumentException("resolved type for [" + bc + "] but didn't implement mapping"); } - public static Supplier castToEvaluator( + public static ExpressionEvaluator.Factory castToEvaluator( BinaryOperator op, Layout layout, DataType required, - BiFunction buildEvaluator + TriFunction buildEvaluator ) { - Supplier lhs = Cast.cast(op.left().dataType(), required, toEvaluator(op.left(), 
layout)); - Supplier rhs = Cast.cast(op.right().dataType(), required, toEvaluator(op.right(), layout)); - return () -> buildEvaluator.apply(lhs.get(), rhs.get()); + var lhs = Cast.cast(op.left().dataType(), required, toEvaluator(op.left(), layout)); + var rhs = Cast.cast(op.right().dataType(), required, toEvaluator(op.right(), layout)); + return dvrCtx -> buildEvaluator.apply(lhs.get(dvrCtx), rhs.get(dvrCtx), dvrCtx); } - public static Supplier castToEvaluator( + public static ExpressionEvaluator.Factory castToEvaluatorWithSource( BinaryOperator op, Layout layout, DataType required, @@ -163,8 +162,8 @@ public static Supplier castToEvaluator( EvalOperator.ExpressionEvaluator, EvalOperator.ExpressionEvaluator> buildEvaluator ) { - Supplier lhs = Cast.cast(op.left().dataType(), required, toEvaluator(op.left(), layout)); - Supplier rhs = Cast.cast(op.right().dataType(), required, toEvaluator(op.right(), layout)); - return () -> buildEvaluator.apply(op.source(), lhs.get(), rhs.get()); + var lhs = Cast.cast(op.left().dataType(), required, toEvaluator(op.left(), layout)); + var rhs = Cast.cast(op.right().dataType(), required, toEvaluator(op.right(), layout)); + return dvrCtx -> buildEvaluator.apply(op.source(), lhs.get(dvrCtx), rhs.get(dvrCtx)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java index b99cccab54ba3..89df014a73705 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.EvalOperator; +import 
org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.planner.Layout; @@ -22,7 +23,6 @@ import java.util.ArrayList; import java.util.BitSet; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.ComparisonMapper.EQUALS; @@ -34,14 +34,14 @@ private InMapper() {} @SuppressWarnings({ "rawtypes", "unchecked" }) @Override - public Supplier map(In in, Layout layout) { - List> listEvaluators = new ArrayList<>(in.list().size()); + public ExpressionEvaluator.Factory map(In in, Layout layout) { + List listEvaluators = new ArrayList<>(in.list().size()); in.list().forEach(e -> { Equals eq = new Equals(in.source(), in.value(), e); - Supplier eqEvaluator = ((ExpressionMapper) EQUALS).map(eq, layout); + ExpressionEvaluator.Factory eqEvaluator = ((ExpressionMapper) EQUALS).map(eq, layout); listEvaluators.add(eqEvaluator); }); - return () -> new InExpressionEvaluator(listEvaluators.stream().map(Supplier::get).toList()); + return dvrCtx -> new InExpressionEvaluator(listEvaluators.stream().map(fac -> fac.get(dvrCtx)).toList()); } record InExpressionEvaluator(List listEvaluators) implements EvalOperator.ExpressionEvaluator { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java index 4adecf0cf7109..c0fa71a59e415 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java @@ -8,23 +8,22 @@ package 
org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.ql.expression.predicate.regex.AbstractStringPattern; import org.elasticsearch.xpack.ql.expression.predicate.regex.RegexMatch; -import java.util.function.Supplier; - public abstract class RegexMapper extends ExpressionMapper> { public static final ExpressionMapper REGEX_MATCH = new RegexMapper() { @Override - public Supplier map(RegexMatch expression, Layout layout) { - return () -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RegexMatchEvaluator( - EvalMapper.toEvaluator(expression.field(), layout).get(), - new CharacterRunAutomaton(((AbstractStringPattern) expression.pattern()).createAutomaton()) + public ExpressionEvaluator.Factory map(RegexMatch expression, Layout layout) { + return dvrCtx -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RegexMatchEvaluator( + EvalMapper.toEvaluator(expression.field(), layout).get(dvrCtx), + new CharacterRunAutomaton(((AbstractStringPattern) expression.pattern()).createAutomaton()), + dvrCtx ); } }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 88610bb989c35..fd8c400b834f0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -11,7 +11,9 @@ 
import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.ql.expression.Expression; @@ -27,7 +29,6 @@ import java.util.ArrayList; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -151,27 +152,25 @@ public Object fold() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + List conditionsEval = conditions.stream() .map(c -> new ConditionEvaluatorSupplier(toEvaluator.apply(c.condition), toEvaluator.apply(c.value))) .toList(); - Supplier elseValueEval = toEvaluator.apply(elseValue); - return () -> new CaseEvaluator( + var elseValueEval = toEvaluator.apply(elseValue); + return dvrCtx -> new CaseEvaluator( LocalExecutionPlanner.toElementType(dataType()), - conditionsEval.stream().map(Supplier::get).toList(), - elseValueEval.get() + conditionsEval.stream().map(x -> x.apply(dvrCtx)).toList(), + elseValueEval.get(dvrCtx) ); } - record ConditionEvaluatorSupplier( - Supplier condition, - Supplier value - ) implements Supplier { + record ConditionEvaluatorSupplier(ExpressionEvaluator.Factory condition, ExpressionEvaluator.Factory value) + implements + Function { @Override - public ConditionEvaluator get() { - return new ConditionEvaluator(condition.get(), value.get()); + public ConditionEvaluator apply(DriverContext driverContext) { + return new ConditionEvaluator(condition.get(driverContext), value.get(driverContext)); } } 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index b0561915e70a0..279262f72e1c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; @@ -31,7 +32,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import java.util.stream.Stream; import static org.elasticsearch.xpack.ql.type.DataTypes.NULL; @@ -106,29 +106,42 @@ public Object fold() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - List> evaluatorSuppliers = children().stream().map(toEvaluator).toList(); - Supplier> suppliers = () -> evaluatorSuppliers.stream().map(Supplier::get); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var suppliers = children().stream().map(toEvaluator).toList(); if (dataType == DataTypes.BOOLEAN) { - return () -> new GreatestBooleanEvaluator( - suppliers.get().map(MvMaxBooleanEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new GreatestBooleanEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + .map(ev -> new MvMaxBooleanEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } if (dataType == 
DataTypes.DOUBLE) { - return () -> new GreatestDoubleEvaluator( - suppliers.get().map(MvMaxDoubleEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new GreatestDoubleEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + .map(ev -> new MvMaxDoubleEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } if (dataType == DataTypes.INTEGER) { - return () -> new GreatestIntEvaluator( - suppliers.get().map(MvMaxIntEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new GreatestIntEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + .map(ev -> new MvMaxIntEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } if (dataType == DataTypes.LONG) { - return () -> new GreatestLongEvaluator( - suppliers.get().map(MvMaxLongEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new GreatestLongEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + .map(ev -> new MvMaxLongEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } if (dataType == DataTypes.KEYWORD @@ -137,8 +150,12 @@ public Supplier toEvaluator( || dataType == DataTypes.VERSION || dataType == DataTypes.UNSUPPORTED) { - return () -> new GreatestBytesRefEvaluator( - suppliers.get().map(MvMaxBytesRefEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new GreatestBytesRefEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + .map(ev -> new MvMaxBytesRefEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } throw EsqlIllegalArgumentException.illegalDataType(dataType); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 
2871b2c54cac3..14d938f454160 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; @@ -31,7 +32,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import java.util.stream.Stream; import static org.elasticsearch.xpack.ql.type.DataTypes.NULL; @@ -106,29 +106,42 @@ public Object fold() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - List> evaluatorSuppliers = children().stream().map(toEvaluator).toList(); - Supplier> suppliers = () -> evaluatorSuppliers.stream().map(Supplier::get); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var suppliers = children().stream().map(toEvaluator).toList(); if (dataType == DataTypes.BOOLEAN) { - return () -> new LeastBooleanEvaluator( - suppliers.get().map(MvMinBooleanEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new LeastBooleanEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + .map(ev -> new MvMinBooleanEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } if (dataType == DataTypes.DOUBLE) { - return () -> new LeastDoubleEvaluator( - suppliers.get().map(MvMinDoubleEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new LeastDoubleEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + 
.map(ev -> new MvMinDoubleEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } if (dataType == DataTypes.INTEGER) { - return () -> new LeastIntEvaluator( - suppliers.get().map(MvMinIntEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new LeastIntEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + .map(ev -> new MvMinIntEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } if (dataType == DataTypes.LONG) { - return () -> new LeastLongEvaluator( - suppliers.get().map(MvMinLongEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new LeastLongEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + .map(ev -> new MvMinLongEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } if (dataType == DataTypes.KEYWORD @@ -137,8 +150,12 @@ public Supplier toEvaluator( || dataType == DataTypes.VERSION || dataType == DataTypes.UNSUPPORTED) { - return () -> new LeastBytesRefEvaluator( - suppliers.get().map(MvMinBytesRefEvaluator::new).toArray(EvalOperator.ExpressionEvaluator[]::new) + return dvrCtx -> new LeastBytesRefEvaluator( + suppliers.stream() + .map(es -> es.get(dvrCtx)) + .map(ev -> new MvMinBytesRefEvaluator(ev, dvrCtx)) + .toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } throw EsqlIllegalArgumentException.illegalDataType(dataType); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 0360397e419b0..257ae59d5f8b1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Warnings; @@ -24,7 +25,7 @@ import java.util.Locale; import java.util.Map; import java.util.function.BiFunction; -import java.util.function.Supplier; +import java.util.function.Function; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -40,13 +41,13 @@ protected AbstractConvertFunction(Source source, Expression field) { /** * Build the evaluator given the evaluator a multivalued field. */ - protected Supplier evaluator(Supplier fieldEval) { + protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { DataType sourceType = field().dataType(); var evaluator = evaluators().get(sourceType); if (evaluator == null) { throw EsqlIllegalArgumentException.illegalDataType(sourceType); } - return () -> evaluator.apply(fieldEval.get(), source()); + return dvrCtx -> evaluator.apply(fieldEval.get(dvrCtx), source()); } @Override @@ -71,9 +72,7 @@ public final Object fold() { } @Override - public final Supplier toEvaluator( - java.util.function.Function> toEvaluator - ) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { return evaluator(toEvaluator.apply(field())); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 3e9f84c2c67a1..66dbb1dd33901 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; @@ -30,7 +30,6 @@ import java.util.List; import java.util.Locale; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; @@ -44,20 +43,24 @@ public DateExtract(Source source, Expression field, Expression chronoFieldExp, C } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier fieldEvaluator = toEvaluator.apply(children().get(0)); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var fieldEvaluator = toEvaluator.apply(children().get(0)); if (children().get(1).foldable()) { ChronoField chrono = chronoField(); if (chrono == null) { BytesRef field = (BytesRef) children().get(1).fold(); throw new EsqlIllegalArgumentException("invalid date field for [{}]: {}", sourceText(), field.utf8ToString()); } - return () -> new DateExtractConstantEvaluator(fieldEvaluator.get(), chrono, configuration().zoneId()); + return dvrCtx -> new DateExtractConstantEvaluator(fieldEvaluator.get(dvrCtx), chrono, configuration().zoneId(), dvrCtx); } - Supplier chronoEvaluator = toEvaluator.apply(children().get(1)); - return () -> new DateExtractEvaluator(source(), 
fieldEvaluator.get(), chronoEvaluator.get(), configuration().zoneId()); + var chronoEvaluator = toEvaluator.apply(children().get(1)); + return dvrCtx -> new DateExtractEvaluator( + source(), + fieldEvaluator.get(dvrCtx), + chronoEvaluator.get(dvrCtx), + configuration().zoneId(), + dvrCtx + ); } private ChronoField chronoField() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index cc1f649cff730..82da590db7ece 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.ql.expression.Expression; @@ -28,7 +28,6 @@ import java.util.List; import java.util.Locale; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -93,22 +92,25 @@ static BytesRef process(long val, BytesRef formatter, @Fixed Locale locale) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier fieldEvaluator = toEvaluator.apply(field); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var fieldEvaluator = toEvaluator.apply(field); if 
(format == null) { - return () -> new DateFormatConstantEvaluator(fieldEvaluator.get(), UTC_DATE_TIME_FORMATTER); + return dvrCtx -> new DateFormatConstantEvaluator(fieldEvaluator.get(dvrCtx), UTC_DATE_TIME_FORMATTER, dvrCtx); } if (format.dataType() != DataTypes.KEYWORD) { throw new IllegalArgumentException("unsupported data type for format [" + format.dataType() + "]"); } if (format.foldable()) { DateFormatter formatter = toFormatter(format.fold(), ((EsqlConfiguration) configuration()).locale()); - return () -> new DateFormatConstantEvaluator(fieldEvaluator.get(), formatter); + return dvrCtx -> new DateFormatConstantEvaluator(fieldEvaluator.get(dvrCtx), formatter, dvrCtx); } - Supplier formatEvaluator = toEvaluator.apply(format); - return () -> new DateFormatEvaluator(fieldEvaluator.get(), formatEvaluator.get(), ((EsqlConfiguration) configuration()).locale()); + var formatEvaluator = toEvaluator.apply(format); + return dvrCtx -> new DateFormatEvaluator( + fieldEvaluator.get(dvrCtx), + formatEvaluator.get(dvrCtx), + ((EsqlConfiguration) configuration()).locale(), + dvrCtx + ); } private static DateFormatter toFormatter(Object format, Locale locale) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index 5d69b8d7f2219..98d75cbf672df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import 
org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; @@ -27,7 +27,6 @@ import java.util.Arrays; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.common.time.DateFormatter.forPattern; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -95,13 +94,11 @@ static long process(BytesRef val, BytesRef formatter, @Fixed ZoneId zoneId) thro } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { ZoneId zone = UTC; // TODO session timezone? - Supplier fieldEvaluator = toEvaluator.apply(field); + ExpressionEvaluator.Factory fieldEvaluator = toEvaluator.apply(field); if (format == null) { - return () -> new DateParseConstantEvaluator(source(), fieldEvaluator.get(), DEFAULT_FORMATTER); + return dvrCtx -> new DateParseConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), DEFAULT_FORMATTER, dvrCtx); } if (format.dataType() != DataTypes.KEYWORD) { throw new IllegalArgumentException("unsupported data type for date_parse [" + format.dataType() + "]"); @@ -109,13 +106,13 @@ public Supplier toEvaluator( if (format.foldable()) { try { DateFormatter formatter = toFormatter(format.fold(), zone); - return () -> new DateParseConstantEvaluator(source(), fieldEvaluator.get(), formatter); + return dvrCtx -> new DateParseConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), formatter, dvrCtx); } catch (IllegalArgumentException e) { throw new EsqlIllegalArgumentException(e, "invalid date pattern for [{}]: {}", sourceText(), e.getMessage()); } } - Supplier formatEvaluator = toEvaluator.apply(format); - return () -> new DateParseEvaluator(source(), fieldEvaluator.get(), formatEvaluator.get(), zone); + ExpressionEvaluator.Factory formatEvaluator = 
toEvaluator.apply(format); + return dvrCtx -> new DateParseEvaluator(source(), fieldEvaluator.get(dvrCtx), formatEvaluator.get(dvrCtx), zone, dvrCtx); } private static DateFormatter toFormatter(Object format, ZoneId zone) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index e2c8dc5b6d85f..d7964e6c011fd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.Rounding; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -25,7 +25,6 @@ import java.time.ZoneId; import java.util.concurrent.TimeUnit; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isTemporalAmount; @@ -152,10 +151,8 @@ private static Rounding.Prepared createRounding(final Duration duration, final Z } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier fieldEvaluator = toEvaluator.apply(timestampField()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var fieldEvaluator = toEvaluator.apply(timestampField()); Expression interval = interval(); if (interval.foldable() == false) { throw new IllegalArgumentException("Function [" + sourceText() + 
"] has invalid interval [" + interval().sourceText() + "]."); @@ -174,10 +171,7 @@ public Supplier toEvaluator( return evaluator(fieldEvaluator, DateTrunc.createRounding(foldedInterval, zoneId())); } - public static Supplier evaluator( - Supplier fieldEvaluator, - Rounding.Prepared rounding - ) { - return () -> new DateTruncEvaluator(fieldEvaluator.get(), rounding); + public static ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEvaluator, Rounding.Prepared rounding) { + return dvrCtx -> new DateTruncEvaluator(fieldEvaluator.get(dvrCtx), rounding, dvrCtx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java index 2695177c7456a..1cf56a2f764ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java @@ -9,7 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ConfigurationFunction; @@ -22,7 +22,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; public class Now extends ConfigurationFunction implements EvaluatorMapper { @@ -73,10 +72,8 @@ protected NodeInfo info() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - return () -> new NowEvaluator(now); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + return dvrCtx -> new NowEvaluator(now, dvrCtx); } 
@Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java index 1eed641f72354..36d889ea1b19a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.network.CIDRUtils; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -24,7 +25,6 @@ import java.util.Arrays; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -55,13 +55,12 @@ public CIDRMatch(Source source, Expression ipField, List matches) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier ipEvaluatorSupplier = toEvaluator.apply(ipField); - return () -> new CIDRMatchEvaluator( - ipEvaluatorSupplier.get(), - matches.stream().map(x -> toEvaluator.apply(x).get()).toArray(EvalOperator.ExpressionEvaluator[]::new) + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var ipEvaluatorSupplier = toEvaluator.apply(ipField); + return dvrCtx -> new CIDRMatchEvaluator( + ipEvaluatorSupplier.get(dvrCtx), + matches.stream().map(x -> toEvaluator.apply(x).get(dvrCtx)).toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index 0d2c2f812a0bf..92b023792cb6f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; @@ -20,7 +20,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; public class Abs extends UnaryScalarFunction implements EvaluatorMapper { public Abs(Source source, @Named("n") Expression n) { @@ -48,21 +47,19 @@ static int process(int fieldVal) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); if (dataType() == DataTypes.DOUBLE) { - return () -> new AbsDoubleEvaluator(field.get()); + return dvrCtx -> new AbsDoubleEvaluator(field.get(dvrCtx), dvrCtx); } if (dataType() == DataTypes.UNSIGNED_LONG) { return field; } if (dataType() == DataTypes.LONG) { - return () -> new AbsLongEvaluator(field.get()); + return dvrCtx -> new AbsLongEvaluator(field.get(dvrCtx), dvrCtx); } if (dataType() == DataTypes.INTEGER) { - return () -> new AbsIntEvaluator(field.get()); + return dvrCtx -> new AbsIntEvaluator(field.get(dvrCtx), 
dvrCtx); } throw EsqlIllegalArgumentException.illegalDataType(dataType()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java index cd7c01832d543..400118a1f7edf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -16,7 +18,6 @@ import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; @@ -29,14 +30,12 @@ abstract class AbstractTrigonometricFunction extends UnaryScalarFunction impleme super(source, field); } - protected abstract EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field); + protected abstract EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrDtx); @Override - public final Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier fieldEval = Cast.cast(field().dataType(), DataTypes.DOUBLE, 
toEvaluator.apply(field())); - return () -> doubleEvaluator(fieldEval.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var fieldEval = Cast.cast(field().dataType(), DataTypes.DOUBLE, toEvaluator.apply(field())); + return dvrCtx -> doubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java index c81d6d0dbea46..74d8547cef35b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,8 +26,8 @@ public Acos(Source source, @Named("n") Expression n) { } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new AcosEvaluator(source(), field); + protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { + return new AcosEvaluator(source(), field, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java index c40b1b0004f5b..3c508ac3bc7da 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,8 +26,8 @@ public Asin(Source source, @Named("n") Expression n) { } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new AsinEvaluator(source(), field); + protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { + return new AsinEvaluator(source(), field, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index 7cacd88495764..c332f4d7618ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,8 +26,8 @@ public Atan(Source source, @Named("n") Expression n) { } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new AtanEvaluator(field); + protected 
EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { + return new AtanEvaluator(field, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index 103c6f77f0ac2..48515ecea1ae4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -23,7 +23,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; @@ -79,12 +78,10 @@ public boolean foldable() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier yEval = Cast.cast(y.dataType(), DataTypes.DOUBLE, toEvaluator.apply(y)); - Supplier xEval = Cast.cast(x.dataType(), DataTypes.DOUBLE, toEvaluator.apply(x)); - return () -> new Atan2Evaluator(yEval.get(), xEval.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var yEval = Cast.cast(y.dataType(), DataTypes.DOUBLE, toEvaluator.apply(y)); + var xEval = Cast.cast(x.dataType(), DataTypes.DOUBLE, toEvaluator.apply(x)); + return dvrCtx -> new Atan2Evaluator(yEval.get(dvrCtx), xEval.get(dvrCtx), dvrCtx); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index 321699b06a0f6..3aaca5f53e8d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -9,7 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Rounding; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; @@ -30,7 +30,6 @@ import java.util.List; import java.util.function.BiFunction; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FOURTH; @@ -101,9 +100,7 @@ public Object fold() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { int b = ((Number) buckets.fold()).intValue(); if (field.dataType() == DataTypes.DATETIME) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java index cfe8acc1641dc..cbc40d0c2e73e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java @@ -9,54 
+9,48 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import java.util.function.Supplier; - import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongToDouble; public class Cast { /** * Build the evaluator supplier to cast {@code in} from {@code current} to {@code required}. */ - public static Supplier cast( - DataType current, - DataType required, - Supplier in - ) { + public static ExpressionEvaluator.Factory cast(DataType current, DataType required, ExpressionEvaluator.Factory in) { if (current == required) { return in; } if (current == DataTypes.NULL || required == DataTypes.NULL) { - return () -> page -> Block.constantNullBlock(page.getPositionCount()); + return dvrCtx -> page -> Block.constantNullBlock(page.getPositionCount()); } if (required == DataTypes.DOUBLE) { if (current == DataTypes.LONG) { - return () -> new CastLongToDoubleEvaluator(in.get()); + return dvrCtx -> new CastLongToDoubleEvaluator(in.get(dvrCtx), dvrCtx); } if (current == DataTypes.INTEGER) { - return () -> new CastIntToDoubleEvaluator(in.get()); + return dvrCtx -> new CastIntToDoubleEvaluator(in.get(dvrCtx), dvrCtx); } if (current == DataTypes.UNSIGNED_LONG) { - return () -> new CastUnsignedLongToDoubleEvaluator(in.get()); + return dvrCtx -> new CastUnsignedLongToDoubleEvaluator(in.get(dvrCtx), dvrCtx); } throw cantCast(current, required); } if (required == DataTypes.UNSIGNED_LONG) { if (current == DataTypes.LONG) { - return () -> new CastLongToUnsignedLongEvaluator(in.get()); + return dvrCtx -> new CastLongToUnsignedLongEvaluator(in.get(dvrCtx), dvrCtx); } if (current == 
DataTypes.INTEGER) { - return () -> new CastIntToUnsignedLongEvaluator(in.get()); + return dvrCtx -> new CastIntToUnsignedLongEvaluator(in.get(dvrCtx), dvrCtx); } } if (required == DataTypes.LONG) { if (current == DataTypes.INTEGER) { - return () -> new CastIntToLongEvaluator(in.get()); + return dvrCtx -> new CastIntToLongEvaluator(in.get(dvrCtx), dvrCtx); } throw cantCast(current, required); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java index 097a4e3086e1f..e23c34e506bc1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -18,7 +18,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; @@ -36,14 +35,12 @@ public Ceil(Source source, @Named("n") Expression n) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { if (dataType().isInteger()) { return toEvaluator.apply(field()); } - Supplier fieldEval = toEvaluator.apply(field()); - return 
() -> new CeilDoubleEvaluator(fieldEval.get()); + var fieldEval = toEvaluator.apply(field()); + return dvrCtx -> new CeilDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java index 90a54ea5de06b..03e52d22dd4e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,8 +26,8 @@ public Cos(Source source, @Named("n") Expression n) { } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new CosEvaluator(field); + protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { + return new CosEvaluator(field, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index 0652509a78144..e19741ffd4419 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -8,6 +8,7 @@ package 
org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,8 +26,8 @@ public Cosh(Source source, @Named("n") Expression n) { } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new CoshEvaluator(source(), field); + protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { + return new CoshEvaluator(source(), field, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java index 7624bf7a1db77..1561113b684d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -18,7 +18,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static 
org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; @@ -36,14 +35,12 @@ public Floor(Source source, @Named("n") Expression n) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { if (dataType().isInteger()) { return toEvaluator.apply(field()); } - Supplier fieldEval = toEvaluator.apply(field()); - return () -> new FloorDoubleEvaluator(fieldEval.get()); + var fieldEval = toEvaluator.apply(field()); + return dvrCtx -> new FloorDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFinite.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFinite.java index 0db8a3b98189c..4d73516f2399c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFinite.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFinite.java @@ -8,14 +8,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; public class IsFinite extends RationalUnaryPredicate { public IsFinite(Source source, Expression field) { @@ -23,11 +22,9 @@ public IsFinite(Source source, Expression field) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); - return () -> new IsFiniteEvaluator(field.get()); + public 
ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); + return dvrCtx -> new IsFiniteEvaluator(field.get(dvrCtx), dvrCtx); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java index c5b6fce00b75d..0fb65d14eee04 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java @@ -8,14 +8,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; public class IsInfinite extends RationalUnaryPredicate { public IsInfinite(Source source, Expression field) { @@ -23,11 +22,9 @@ public IsInfinite(Source source, Expression field) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); - return () -> new IsInfiniteEvaluator(field.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); + return dvrCtx -> new IsInfiniteEvaluator(field.get(dvrCtx), dvrCtx); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java 
index 81bec68372639..1b44158d9e8fc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java @@ -8,14 +8,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; public class IsNaN extends RationalUnaryPredicate { public IsNaN(Source source, Expression field) { @@ -23,11 +22,9 @@ public IsNaN(Source source, Expression field) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); - return () -> new IsNaNEvaluator(field.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); + return dvrCtx -> new IsNaNEvaluator(field.get(dvrCtx), dvrCtx); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index 616172f165d0b..2f90898cb9086 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import 
org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; @@ -22,7 +22,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; @@ -33,24 +32,21 @@ public Log10(Source source, @Named("n") Expression n) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); var fieldType = field().dataType(); - var eval = field.get(); if (fieldType == DataTypes.DOUBLE) { - return () -> new Log10DoubleEvaluator(source(), eval); + return dvrCtx -> new Log10DoubleEvaluator(source(), field.get(dvrCtx), dvrCtx); } if (fieldType == DataTypes.INTEGER) { - return () -> new Log10IntEvaluator(source(), eval); + return dvrCtx -> new Log10IntEvaluator(source(), field.get(dvrCtx), dvrCtx); } if (fieldType == DataTypes.LONG) { - return () -> new Log10LongEvaluator(source(), eval); + return dvrCtx -> new Log10LongEvaluator(source(), field.get(dvrCtx), dvrCtx); } if (fieldType == DataTypes.UNSIGNED_LONG) { - return () -> new Log10UnsignedLongEvaluator(source(), eval); + return dvrCtx -> new Log10UnsignedLongEvaluator(source(), field.get(dvrCtx), dvrCtx); } throw EsqlIllegalArgumentException.illegalDataType(fieldType); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 2408cf88c25ff..976310511d05f 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -24,7 +24,6 @@ import java.util.List; import java.util.Objects; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast.cast; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -165,28 +164,29 @@ public ScriptTemplate asScript() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var baseEvaluator = toEvaluator.apply(base); var exponentEvaluator = toEvaluator.apply(exponent); if (dataType == DataTypes.DOUBLE) { - return () -> new PowDoubleEvaluator( + return dvrCtx -> new PowDoubleEvaluator( source(), - cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator).get(), - cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator).get() + cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator).get(dvrCtx), + cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator).get(dvrCtx), + dvrCtx ); } else if (dataType == DataTypes.LONG) { - return () -> new PowLongEvaluator( + return dvrCtx -> new PowLongEvaluator( source(), - cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator).get(), - cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator).get() + cast(base.dataType(), 
DataTypes.DOUBLE, baseEvaluator).get(dvrCtx), + cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator).get(dvrCtx), + dvrCtx ); } else { - return () -> new PowIntEvaluator( + return dvrCtx -> new PowIntEvaluator( source(), - cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator).get(), - cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator).get() + cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator).get(dvrCtx), + cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator).get(dvrCtx), + dvrCtx ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index 3ce830837cd5f..d2126e50abed2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -7,8 +7,10 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; @@ -27,7 +29,6 @@ import java.util.Objects; import java.util.function.BiFunction; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -141,40 +142,38 @@ public ScriptTemplate asScript() { } @Override - public 
Supplier toEvaluator( - Function> toEvaluator - ) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { DataType fieldType = dataType(); if (fieldType == DataTypes.DOUBLE) { return toEvaluator(toEvaluator, RoundDoubleNoDecimalsEvaluator::new, RoundDoubleEvaluator::new); } if (fieldType == DataTypes.INTEGER) { - return toEvaluator(toEvaluator, Function.identity(), RoundIntEvaluator::new); + return toEvaluator(toEvaluator, identity(), RoundIntEvaluator::new); } if (fieldType == DataTypes.LONG) { - return toEvaluator(toEvaluator, Function.identity(), RoundLongEvaluator::new); + return toEvaluator(toEvaluator, identity(), RoundLongEvaluator::new); } if (fieldType == DataTypes.UNSIGNED_LONG) { - return toEvaluator(toEvaluator, Function.identity(), RoundUnsignedLongEvaluator::new); + return toEvaluator(toEvaluator, identity(), RoundUnsignedLongEvaluator::new); } throw EsqlIllegalArgumentException.illegalDataType(fieldType); } - private Supplier toEvaluator( - Function> toEvaluator, - Function noDecimals, - BiFunction withDecimals + private static BiFunction identity() { + return (t, u) -> t; + } + + private ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator, + BiFunction noDecimals, + TriFunction withDecimals ) { - Supplier fieldEvaluator = toEvaluator.apply(field()); + var fieldEvaluator = toEvaluator.apply(field()); if (decimals == null) { - return () -> noDecimals.apply(fieldEvaluator.get()); + return dvrCtx -> noDecimals.apply(fieldEvaluator.get(dvrCtx), dvrCtx); } - Supplier decimalsEvaluator = Cast.cast( - decimals().dataType(), - DataTypes.LONG, - toEvaluator.apply(decimals()) - ); - return () -> withDecimals.apply(fieldEvaluator.get(), decimalsEvaluator.get()); + var decimalsEvaluator = Cast.cast(decimals().dataType(), DataTypes.LONG, toEvaluator.apply(decimals())); + return dvrCtx -> withDecimals.apply(fieldEvaluator.get(dvrCtx), decimalsEvaluator.get(dvrCtx), dvrCtx); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index 2a5f709d178d2..4045eaa38e346 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,8 +26,8 @@ public Sin(Source source, @Named("n") Expression n) { } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new SinEvaluator(field); + protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { + return new SinEvaluator(field, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java index a790b6c52c184..2a5da6775f9d2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import 
org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,8 +26,8 @@ public Sinh(Source source, @Named("n") Expression n) { } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new SinhEvaluator(source(), field); + protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { + return new SinhEvaluator(source(), field, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index b2386bdd9abad..6340a0735abc9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; @@ -22,7 +22,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; @@ -33,24 +32,21 @@ public Sqrt(Source source, @Named("n") Expression n) { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); + public 
ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); var fieldType = field().dataType(); - var eval = field.get(); if (fieldType == DataTypes.DOUBLE) { - return () -> new SqrtDoubleEvaluator(source(), eval); + return dvrCtx -> new SqrtDoubleEvaluator(source(), field.get(dvrCtx), dvrCtx); } if (fieldType == DataTypes.INTEGER) { - return () -> new SqrtIntEvaluator(source(), eval); + return dvrCtx -> new SqrtIntEvaluator(source(), field.get(dvrCtx), dvrCtx); } if (fieldType == DataTypes.LONG) { - return () -> new SqrtLongEvaluator(source(), eval); + return dvrCtx -> new SqrtLongEvaluator(source(), field.get(dvrCtx), dvrCtx); } if (fieldType == DataTypes.UNSIGNED_LONG) { - return () -> new SqrtUnsignedLongEvaluator(eval); + return dvrCtx -> new SqrtUnsignedLongEvaluator(field.get(dvrCtx), dvrCtx); } throw EsqlIllegalArgumentException.illegalDataType(fieldType); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java index 8a5047ac5764b..eda054b3c2051 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,8 +26,8 @@ public Tan(Source source, @Named("n") Expression n) { } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new TanEvaluator(field); + 
protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { + return new TanEvaluator(field, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java index 6081ab4a1493f..65327a441103f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,8 +26,8 @@ public Tanh(Source source, @Named("n") Expression n) { } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field) { - return new TanhEvaluator(field); + protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { + return new TanhEvaluator(field, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java index 97f18b1154979..5bd3609461530 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java @@ -11,13 +11,12 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; -import java.util.function.Supplier; - /** * Base class for functions that reduce multivalued fields into single valued fields. */ @@ -29,7 +28,7 @@ protected AbstractMultivalueFunction(Source source, Expression field) { /** * Build the evaluator given the evaluator a multivalued field. */ - protected abstract Supplier evaluator(Supplier fieldEval); + protected abstract ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval); @Override protected final TypeResolution resolveType() { @@ -47,9 +46,7 @@ public final Object fold() { } @Override - public final Supplier toEvaluator( - java.util.function.Function> toEvaluator - ) { + public final ExpressionEvaluator.Factory toEvaluator(java.util.function.Function toEvaluator) { return evaluator(toEvaluator.apply(field())); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index 8bfa48eefd9af..7930af6b25d8c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -9,6 +9,7 @@ import 
org.elasticsearch.compute.ann.MvEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; @@ -19,7 +20,6 @@ import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -44,14 +44,14 @@ public DataType dataType() { } @Override - protected Supplier evaluator(Supplier fieldEval) { + protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case DOUBLE -> () -> new MvAvgDoubleEvaluator(fieldEval.get()); - case INT -> () -> new MvAvgIntEvaluator(fieldEval.get()); + case DOUBLE -> dvrCtx -> new MvAvgDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case INT -> dvrCtx -> new MvAvgIntEvaluator(fieldEval.get(dvrCtx), dvrCtx); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG - ? () -> new MvAvgUnsignedLongEvaluator(fieldEval.get()) - : () -> new MvAvgLongEvaluator(fieldEval.get()); - case NULL -> () -> EvalOperator.CONSTANT_NULL; + ? 
dvrCtx -> new MvAvgUnsignedLongEvaluator(fieldEval.get(dvrCtx), dvrCtx) + : dvrCtx -> new MvAvgLongEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java index f6cf74a7f4d34..8cd8e4b3b2509 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -23,7 +24,6 @@ import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; @@ -55,12 +55,10 @@ public DataType dataType() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier fieldEval = toEvaluator.apply(left()); - Supplier delimEval = toEvaluator.apply(right()); - return () -> new MvConcatEvaluator(fieldEval.get(), delimEval.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var fieldEval = toEvaluator.apply(left()); + var delimEval = toEvaluator.apply(right()); + return dvrCtx -> new MvConcatEvaluator(fieldEval.get(dvrCtx), 
delimEval.get(dvrCtx)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 7aec87cf95f89..431c3f568a1db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -21,7 +22,6 @@ import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -44,8 +44,8 @@ public DataType dataType() { } @Override - protected Supplier evaluator(Supplier fieldEval) { - return () -> new Evaluator(fieldEval.get()); + protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { + return dvrCtx -> new Evaluator(fieldEval.get(dvrCtx)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index 536e231a6956a..cec9da98d96a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -34,7 +33,7 @@ protected TypeResolution resolveFieldType() { } @Override - protected Supplier evaluator(Supplier fieldEval) { + protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return MultivalueDedupe.evaluator(LocalExecutionPlanner.toElementType(dataType()), fieldEval); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index 7aa9121dce321..5f527beef4967 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.MvEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -18,7 
+19,6 @@ import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -36,14 +36,14 @@ protected TypeResolution resolveFieldType() { } @Override - protected Supplier evaluator(Supplier fieldEval) { + protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case BOOLEAN -> () -> new MvMaxBooleanEvaluator(fieldEval.get()); - case BYTES_REF -> () -> new MvMaxBytesRefEvaluator(fieldEval.get()); - case DOUBLE -> () -> new MvMaxDoubleEvaluator(fieldEval.get()); - case INT -> () -> new MvMaxIntEvaluator(fieldEval.get()); - case LONG -> () -> new MvMaxLongEvaluator(fieldEval.get()); - case NULL -> () -> EvalOperator.CONSTANT_NULL; + case BOOLEAN -> dvrCtx -> new MvMaxBooleanEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case BYTES_REF -> dvrCtx -> new MvMaxBytesRefEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case DOUBLE -> dvrCtx -> new MvMaxDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case INT -> dvrCtx -> new MvMaxIntEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case LONG -> dvrCtx -> new MvMaxLongEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index c2916a270d830..edd68b1a45a37 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -12,7 +12,7 @@ import 
org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.ql.expression.Expression; @@ -23,7 +23,6 @@ import java.math.BigInteger; import java.util.Arrays; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -44,13 +43,13 @@ protected TypeResolution resolveFieldType() { } @Override - protected Supplier evaluator(Supplier fieldEval) { + protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case DOUBLE -> () -> new MvMedianDoubleEvaluator(fieldEval.get()); - case INT -> () -> new MvMedianIntEvaluator(fieldEval.get()); + case DOUBLE -> dvrCtx -> new MvMedianDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case INT -> dvrCtx -> new MvMedianIntEvaluator(fieldEval.get(dvrCtx), dvrCtx); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG - ? () -> new MvMedianUnsignedLongEvaluator(fieldEval.get()) - : () -> new MvMedianLongEvaluator(fieldEval.get()); + ? 
dvrCtx -> new MvMedianUnsignedLongEvaluator(fieldEval.get(dvrCtx), dvrCtx) + : dvrCtx -> new MvMedianLongEvaluator(fieldEval.get(dvrCtx), dvrCtx); default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index c24db3d268494..2647cbfc2e0c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.MvEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -18,7 +19,6 @@ import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -36,14 +36,14 @@ protected TypeResolution resolveFieldType() { } @Override - protected Supplier evaluator(Supplier fieldEval) { + protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case BOOLEAN -> () -> new MvMinBooleanEvaluator(fieldEval.get()); - case BYTES_REF -> () -> new MvMinBytesRefEvaluator(fieldEval.get()); - case DOUBLE -> () -> new MvMinDoubleEvaluator(fieldEval.get()); - case INT -> () -> new MvMinIntEvaluator(fieldEval.get()); - case LONG -> () -> new MvMinLongEvaluator(fieldEval.get()); 
- case NULL -> () -> EvalOperator.CONSTANT_NULL; + case BOOLEAN -> dvrCtx -> new MvMinBooleanEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case BYTES_REF -> dvrCtx -> new MvMinBytesRefEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case DOUBLE -> dvrCtx -> new MvMinDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case INT -> dvrCtx -> new MvMinIntEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case LONG -> dvrCtx -> new MvMinLongEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index 843453154b31c..858842cd78721 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.MvEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; @@ -18,7 +19,6 @@ import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -38,14 +38,14 @@ protected TypeResolution resolveFieldType() { } @Override - protected Supplier evaluator(Supplier fieldEval) { + protected ExpressionEvaluator.Factory 
evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case DOUBLE -> () -> new MvSumDoubleEvaluator(fieldEval.get()); - case INT -> () -> new MvSumIntEvaluator(source(), fieldEval.get()); + case DOUBLE -> dvrCtx -> new MvSumDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case INT -> dvrCtx -> new MvSumIntEvaluator(source(), fieldEval.get(dvrCtx), dvrCtx); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG - ? () -> new MvSumUnsignedLongEvaluator(source(), fieldEval.get()) - : () -> new MvSumLongEvaluator(source(), fieldEval.get()); - case NULL -> () -> EvalOperator.CONSTANT_NULL; + ? dvrCtx -> new MvSumUnsignedLongEvaluator(source(), fieldEval.get(dvrCtx), dvrCtx) + : dvrCtx -> new MvSumLongEvaluator(source(), fieldEval.get(dvrCtx), dvrCtx); + case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 7730080eaf3ae..b963575826182 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.ql.expression.Expression; @@ -26,7 +27,6 @@ import java.util.List; import java.util.function.Function; 
-import java.util.function.Supplier; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -117,13 +117,10 @@ public Object fold() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - List> evaluatorSuppliers = children().stream().map(toEvaluator).toList(); - return () -> new CoalesceEvaluator( + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + return dvrCxt -> new CoalesceEvaluator( LocalExecutionPlanner.toElementType(dataType()), - evaluatorSuppliers.stream().map(Supplier::get).toList() + children().stream().map(toEvaluator).map(x -> x.get(dvrCxt)).toList() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 547dfb5b9376c..4bde51fe28579 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -24,7 +25,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import java.util.stream.Stream; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -72,13 +72,12 @@ public Object fold() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - List> values = children().stream().map(toEvaluator).toList(); - return () -> new 
ConcatEvaluator( + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var values = children().stream().map(toEvaluator).toList(); + return dvrCtx -> new ConcatEvaluator( new BytesRefBuilder(), - values.stream().map(Supplier::get).toArray(EvalOperator.ExpressionEvaluator[]::new) + values.stream().map(fac -> fac.get(dvrCtx)).toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java index eea6cbfd56d94..ffb5a3543f3f2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java @@ -10,7 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -20,7 +20,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; @@ -47,11 +46,9 @@ public Object fold() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); - return () -> new LTrimEvaluator(field.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); + return dvrCtx -> new LTrimEvaluator(field.get(dvrCtx), dvrCtx); } @Override 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 121225765f5af..8d129c686c9d6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -11,7 +11,7 @@ import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,7 +25,6 @@ import java.util.Arrays; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -69,16 +68,13 @@ static BytesRef process( } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - - Supplier strSupplier = toEvaluator.apply(str); - Supplier lengthSupplier = toEvaluator.apply(length); - return () -> { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var strSupplier = toEvaluator.apply(str); + var lengthSupplier = toEvaluator.apply(length); + return dvrCtx -> { BytesRef out = new BytesRef(); UnicodeUtil.UTF8CodePoint cp = new UnicodeUtil.UTF8CodePoint(); - return new LeftEvaluator(out, cp, strSupplier.get(), lengthSupplier.get()); + return new LeftEvaluator(out, cp, strSupplier.get(dvrCtx), lengthSupplier.get(dvrCtx), dvrCtx); }; } diff 
--git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index b67151452dc0a..e0a1a8ed297a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -10,7 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,7 +21,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; @@ -72,10 +71,8 @@ protected NodeInfo info() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); - return () -> new LengthEvaluator(field.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); + return dvrCtx -> new LengthEvaluator(field.get(dvrCtx), dvrCtx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java index 82dbabc794013..46c8d43f0a5a7 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java @@ -10,7 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -20,7 +20,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; @@ -47,11 +46,9 @@ public Object fold() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); - return () -> new RTrimEvaluator(field.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); + return dvrCtx -> new RTrimEvaluator(field.get(dvrCtx), dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java index e41fb21b57797..6ab7b513d5737 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java @@ -11,7 +11,7 @@ import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; -import 
org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Named; import org.elasticsearch.xpack.ql.expression.Expression; @@ -25,7 +25,6 @@ import java.util.Arrays; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -73,16 +72,13 @@ static BytesRef process( } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - - Supplier strSupplier = toEvaluator.apply(str); - Supplier lengthSupplier = toEvaluator.apply(length); - return () -> { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var strSupplier = toEvaluator.apply(str); + var lengthSupplier = toEvaluator.apply(length); + return dvrCtx -> { BytesRef out = new BytesRef(); UnicodeUtil.UTF8CodePoint cp = new UnicodeUtil.UTF8CodePoint(); - return new RightEvaluator(out, cp, strSupplier.get(), lengthSupplier.get()); + return new RightEvaluator(out, cp, strSupplier.get(dvrCtx), lengthSupplier.get(dvrCtx), dvrCtx); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index b13f4efb25174..75d2ada1ca97b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -11,7 +11,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.data.BytesRefBlock; 
-import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -115,18 +114,16 @@ protected NodeInfo info() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier str = toEvaluator.apply(left()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var str = toEvaluator.apply(left()); if (right().foldable() == false) { - Supplier delim = toEvaluator.apply(right()); - return () -> new SplitVariableEvaluator(str.get(), delim.get(), new BytesRef()); + var delim = toEvaluator.apply(right()); + return dvrCtx -> new SplitVariableEvaluator(str.get(dvrCtx), delim.get(dvrCtx), new BytesRef(), dvrCtx); } BytesRef delim = (BytesRef) right().fold(); if (delim.length != 1) { throw new QlIllegalArgumentException("for now delimiter must be a single byte"); } - return () -> new SplitSingleByteEvaluator(str.get(), delim.bytes[delim.offset], new BytesRef()); + return dvrCtx -> new SplitSingleByteEvaluator(str.get(dvrCtx), delim.bytes[delim.offset], new BytesRef(), dvrCtx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index 3018a95b7d45d..8d147f3cf9caf 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -9,7 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; @@ -22,7 +22,6 @@ import java.util.Arrays; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -91,11 +90,9 @@ public ScriptTemplate asScript() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier strEval = toEvaluator.apply(str); - Supplier prefixEval = toEvaluator.apply(prefix); - return () -> new StartsWithEvaluator(strEval.get(), prefixEval.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var strEval = toEvaluator.apply(str); + var prefixEval = toEvaluator.apply(prefix); + return dvrCtx -> new StartsWithEvaluator(strEval.get(dvrCtx), prefixEval.get(dvrCtx), dvrCtx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index cca891231fb73..9b8a6c6aa1720 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -10,7 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -24,7 +24,6 @@ import java.util.Arrays; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -132,15 +131,13 @@ public ScriptTemplate asScript() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier strSupplier = toEvaluator.apply(str); - Supplier startSupplier = toEvaluator.apply(start); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var strSupplier = toEvaluator.apply(str); + var startSupplier = toEvaluator.apply(start); if (length == null) { - return () -> new SubstringNoLengthEvaluator(strSupplier.get(), startSupplier.get()); + return dvrCtx -> new SubstringNoLengthEvaluator(strSupplier.get(dvrCtx), startSupplier.get(dvrCtx), dvrCtx); } - Supplier lengthSupplier = toEvaluator.apply(length); - return () -> new SubstringEvaluator(strSupplier.get(), startSupplier.get(), lengthSupplier.get()); + var lengthSupplier = toEvaluator.apply(length); + return dvrCtx -> new SubstringEvaluator(strSupplier.get(dvrCtx), startSupplier.get(dvrCtx), lengthSupplier.get(dvrCtx), dvrCtx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java index cdae8511a0822..f9d5febd5fc02 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java @@ -10,7 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -20,7 +20,6 @@ import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; @@ -48,11 +47,9 @@ public Object fold() { } @Override - public Supplier toEvaluator( - Function> toEvaluator - ) { - Supplier field = toEvaluator.apply(field()); - return () -> new TrimEvaluator(field.get()); + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var field = toEvaluator.apply(field()); + return dvrCtx -> new TrimEvaluator(field.get(dvrCtx), dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java index 0ab7050386fb0..7da2754bfd931 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java @@ -33,7 +33,7 @@ 
public Add(Source source, Expression left, Expression right) { AddIntsEvaluator::new, AddLongsEvaluator::new, AddUnsignedLongsEvaluator::new, - (s, l, r) -> new AddDoublesEvaluator(l, r), + (s, l, r, dvrCtx) -> new AddDoublesEvaluator(l, r, dvrCtx), AddDatetimesEvaluator::new ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index 01e0af5add780..3780e19a1dfd9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; -import org.elasticsearch.common.TriFunction; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; @@ -18,14 +18,21 @@ import java.time.temporal.TemporalAmount; import java.util.function.Function; import java.util.function.Predicate; -import java.util.function.Supplier; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isDateTimeOrTemporal; abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperation { - interface DatetimeArithmeticEvaluator extends TriFunction {}; + /** Arithmetic (quad) function. 
*/ + interface DatetimeArithmeticEvaluator { + ExpressionEvaluator apply( + Source source, + ExpressionEvaluator expressionEvaluator, + TemporalAmount temporalAmount, + DriverContext driverContext + ); + } private final DatetimeArithmeticEvaluator datetimes; @@ -61,12 +68,13 @@ protected TypeResolution resolveType() { } @Override - public Supplier toEvaluator(Function> toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { return dataType() == DataTypes.DATETIME - ? () -> datetimes.apply( + ? dvrCtx -> datetimes.apply( source(), - toEvaluator.apply(argumentOfType(DataTypes::isDateTime)).get(), - (TemporalAmount) argumentOfType(EsqlDataTypes::isTemporalAmount).fold() + toEvaluator.apply(argumentOfType(DataTypes::isDateTime)).get(dvrCtx), + (TemporalAmount) argumentOfType(EsqlDataTypes::isTemporalAmount).fold(), + dvrCtx ) : super.toEvaluator(toEvaluator); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java index ad85c6c203b10..5a89e24eb6007 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java @@ -34,7 +34,7 @@ public Div(Source source, Expression left, Expression right, DataType type) { DivIntsEvaluator::new, DivLongsEvaluator::new, DivUnsignedLongsEvaluator::new, - (s, l, r) -> new DivDoublesEvaluator(l, r) + (s, l, r, dvrCtx) -> new DivDoublesEvaluator(l, r, dvrCtx) ); this.type = type; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index b3c5ca390100e..5a417134c96fc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -7,9 +7,8 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -23,7 +22,6 @@ import java.io.IOException; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; @@ -71,7 +69,15 @@ public String symbol() { } } - interface ArithmeticEvaluator extends TriFunction {}; + /** Arithmetic (quad) function. 
*/ + interface ArithmeticEvaluator { + ExpressionEvaluator apply( + Source source, + ExpressionEvaluator expressionEvaluator1, + ExpressionEvaluator expressionEvaluator2, + DriverContext driverContext + ); + } private final ArithmeticEvaluator ints; private final ArithmeticEvaluator longs; @@ -110,13 +116,13 @@ public DataType dataType() { } @Override - public Supplier toEvaluator(Function> toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var commonType = dataType(); var leftType = left().dataType(); if (leftType.isNumeric()) { - Supplier l = Cast.cast(left().dataType(), commonType, toEvaluator.apply(left())); - Supplier r = Cast.cast(right().dataType(), commonType, toEvaluator.apply(right())); + var l = Cast.cast(left().dataType(), commonType, toEvaluator.apply(left())); + var r = Cast.cast(right().dataType(), commonType, toEvaluator.apply(right())); ArithmeticEvaluator eval; if (commonType == INTEGER) { @@ -130,7 +136,7 @@ public Supplier toEvaluator(Function eval.apply(source(), l.get(), r.get()); + return dvrCtx -> eval.apply(source(), l.get(dvrCtx), r.get(dvrCtx), dvrCtx); } throw new EsqlIllegalArgumentException("Unsupported type " + leftType); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java index 8a59da3f5668c..f1aaeb1adaf14 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java @@ -26,7 +26,7 @@ public Mod(Source source, Expression left, Expression right) { ModIntsEvaluator::new, ModLongsEvaluator::new, ModUnsignedLongsEvaluator::new, - (s, l, r) -> new ModDoublesEvaluator(l, r) + (s, l, r, dvrCtx) -> new ModDoublesEvaluator(l, r, dvrCtx) ); } 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java index 1583981989440..9b42cfce182b9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java @@ -30,7 +30,7 @@ public Mul(Source source, Expression left, Expression right) { MulIntsEvaluator::new, MulLongsEvaluator::new, MulUnsignedLongsEvaluator::new, - (s, l, r) -> new MulDoublesEvaluator(l, r) + (s, l, r, dvrCtx) -> new MulDoublesEvaluator(l, r, dvrCtx) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java index 27a3bf362f199..0ce4b1bad6a37 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java @@ -24,7 +24,6 @@ import java.time.Period; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isTemporalAmount; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -40,21 +39,21 @@ public Neg(Source source, Expression field) { } @Override - public Supplier toEvaluator(Function> toEvaluator) { + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { DataType type = dataType(); if (type.isNumeric()) { var f = toEvaluator.apply(field()); - Supplier supplier = null; + ExpressionEvaluator.Factory supplier = null; if (type == 
DataTypes.INTEGER) { - supplier = () -> new NegIntsEvaluator(source(), f.get()); + supplier = dvrCtx -> new NegIntsEvaluator(source(), f.get(dvrCtx), dvrCtx); } // Unsigned longs are unsupported by choice; negating them would require implicitly converting to long. else if (type == DataTypes.LONG) { - supplier = () -> new NegLongsEvaluator(source(), f.get()); + supplier = dvrCtx -> new NegLongsEvaluator(source(), f.get(dvrCtx), dvrCtx); } else if (type == DataTypes.DOUBLE) { - supplier = () -> new NegDoublesEvaluator(f.get()); + supplier = dvrCtx -> new NegDoublesEvaluator(f.get(dvrCtx), dvrCtx); } if (supplier != null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java index a114795b07275..b00346b8cceb7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java @@ -36,7 +36,7 @@ public Sub(Source source, Expression left, Expression right) { SubIntsEvaluator::new, SubLongsEvaluator::new, SubUnsignedLongsEvaluator::new, - (s, l, r) -> new SubDoublesEvaluator(l, r), + (s, l, r, dvrCtx) -> new SubDoublesEvaluator(l, r, dvrCtx), SubDatetimesEvaluator::new ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index b2f73a624b00c..cd653c64213c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -92,7 +92,6 @@ import java.util.Optional; import java.util.Set; import 
java.util.function.Function; -import java.util.function.Supplier; import java.util.stream.Stream; import static java.util.stream.Collectors.joining; @@ -390,8 +389,7 @@ private PhysicalOperation planEval(EvalExec eval, LocalExecutionPlannerContext c PhysicalOperation source = plan(eval.child(), context); for (Alias field : eval.fields()) { - Supplier evaluatorSupplier; - evaluatorSupplier = EvalMapper.toEvaluator(field.child(), source.layout); + var evaluatorSupplier = EvalMapper.toEvaluator(field.child(), source.layout); Layout.Builder layout = source.layout.builder(); layout.append(field.toAttribute()); source = source.with(new EvalOperatorFactory(evaluatorSupplier), layout.build()); @@ -472,7 +470,7 @@ private PhysicalOperation planEnrich(EnrichExec enrich, LocalExecutionPlannerCon ); } - private Supplier toEvaluator(Expression exp, Layout layout) { + private ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { return EvalMapper.toEvaluator(exp, layout); } @@ -689,7 +687,7 @@ public Driver apply(String sessionId) { List operators = new ArrayList<>(); SinkOperator sink = null; boolean success = false; - var driverContext = new DriverContext(); + var driverContext = new DriverContext(bigArrays); try { source = physicalOperation.source(driverContext); physicalOperation.operators(operators, driverContext); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 1db97943dfdad..80aeb25d49d4a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -13,7 +13,9 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockUtils; import 
org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.PathUtils; import org.elasticsearch.logging.LogManager; import org.elasticsearch.test.ESTestCase; @@ -56,7 +58,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -130,7 +131,7 @@ protected final Expression buildLiteralExpression(TestCaseSupplier.TestCase test return build(testCase.getSource(), testCase.getDataAsLiterals()); } - protected final Supplier evaluator(Expression e) { + protected final ExpressionEvaluator.Factory evaluator(Expression e) { e = new FoldNull().rule(e); if (e.foldable()) { e = new Literal(e.source(), e.fold(), e.dataType()); @@ -179,7 +180,7 @@ public final void testEvaluate() { expression = new FoldNull().rule(expression); assertThat(expression.dataType(), equalTo(testCase.expectedType)); // TODO should we convert unsigned_long into BigDecimal so it's easier to assert? 
- Object result = toJavaObject(evaluator(expression).get().eval(row(testCase.getDataValues())), 0); + Object result = toJavaObject(evaluator(expression).get(new DriverContext()).eval(row(testCase.getDataValues())), 0); assertThat(result, not(equalTo(Double.NaN))); assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); @@ -193,7 +194,7 @@ public final void testSimpleWithNulls() { // TODO replace this with nulls insert assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); List simpleData = testCase.getDataValues(); - EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(); + EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(new DriverContext()); Block[] orig = BlockUtils.fromListRow(simpleData); for (int i = 0; i < orig.length; i++) { List data = new ArrayList<>(); @@ -221,7 +222,7 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); int count = 10_000; int threads = 5; - Supplier evalSupplier = evaluator(buildFieldExpression(testCase)); + var evalSupplier = evaluator(buildFieldExpression(testCase)); ExecutorService exec = Executors.newFixedThreadPool(threads); try { List> futures = new ArrayList<>(); @@ -230,7 +231,7 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru Page page = row(simpleData); futures.add(exec.submit(() -> { - EvalOperator.ExpressionEvaluator eval = evalSupplier.get(); + EvalOperator.ExpressionEvaluator eval = evalSupplier.get(new DriverContext()); for (int c = 0; c < count; c++) { assertThat(toJavaObject(eval.eval(page), 0), testCase.getMatcher()); } @@ -248,7 +249,7 @@ public final void 
testEvaluatorToString() { assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); var supplier = evaluator(buildFieldExpression(testCase)); - var ev = supplier.get(); + var ev = supplier.get(new DriverContext()); assertThat(ev.toString(), equalTo(testCase.evaluatorToString)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index db2e3fc482b8d..79138679e2414 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -88,7 +89,9 @@ protected Expression build(Source source, List args) { public void testEvalCase() { testCase( caseExpr -> toJavaObject( - caseExpr.toEvaluator(child -> evaluator(child)).get().eval(new Page(IntBlock.newConstantBlockWith(0, 1))), + caseExpr.toEvaluator(child -> evaluator(child)) + .get(new DriverContext()) + .eval(new Page(IntBlock.newConstantBlockWith(0, 1))), 0 ) ); @@ -148,13 +151,13 @@ public void testCaseIsLazy() { assertEquals(1, toJavaObject(caseExpr.toEvaluator(child -> { Object value = child.fold(); if (value != null && value.equals(2)) { - return () -> page -> { + return dvrCtx 
-> page -> { fail("Unexpected evaluation of 4th argument"); return null; }; } return evaluator(child); - }).get().eval(new Page(IntBlock.newConstantBlockWith(0, 1))), 0)); + }).get(new DriverContext()).eval(new Page(IntBlock.newConstantBlockWith(0, 1))), 0)); } private static Case caseExpr(Object... args) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index f9e8886f960ad..cbfb0d6a579fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -84,12 +85,15 @@ public void testExamples() { } private Object process(Number val) { - return toJavaObject(evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), null)).get().eval(row(List.of(val))), 0); + return toJavaObject( + evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), null)).get(new DriverContext()).eval(row(List.of(val))), + 0 + ); } private Object process(Number val, int decimals) { return toJavaObject( - evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), field("decimals", DataTypes.INTEGER))).get() + evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), field("decimals", DataTypes.INTEGER))).get(new DriverContext()) .eval(row(List.of(val, decimals))), 0 ); @@ -115,7 +119,7 @@ protected 
DataType expectedType(List argTypes) { public void testNoDecimalsToString() { assertThat( - evaluator(new Round(Source.EMPTY, field("val", DataTypes.DOUBLE), null)).get().toString(), + evaluator(new Round(Source.EMPTY, field("val", DataTypes.DOUBLE), null)).get(new DriverContext()).toString(), equalTo("RoundDoubleNoDecimalsEvaluator[val=Attribute[channel=0]]") ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index ad74e1896a951..a300dbb383211 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -444,7 +445,7 @@ private void testBlock(boolean insertNulls) { builder.copyFrom(oneRowBlock, 0, 1); } Block input = builder.build(); - Block result = evaluator(buildFieldExpression(testCase)).get().eval(new Page(input)); + Block result = evaluator(buildFieldExpression(testCase)).get(new DriverContext()).eval(new Page(input)); assertThat(result.getPositionCount(), equalTo(result.getPositionCount())); for (int p = 0; p < input.getPositionCount(); p++) { diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java index 4bd6403c98e2a..f6082af0e142e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -71,18 +72,25 @@ public void testNull() { BytesRef bar = new BytesRef("bar"); BytesRef delim = new BytesRef(";"); Expression expression = buildFieldExpression(testCase); + DriverContext dvrCtx = new DriverContext(); - assertThat(toJavaObject(evaluator(expression).get().eval(row(Arrays.asList(Arrays.asList(foo, bar), null))), 0), nullValue()); - assertThat(toJavaObject(evaluator(expression).get().eval(row(Arrays.asList(foo, null))), 0), nullValue()); - assertThat(toJavaObject(evaluator(expression).get().eval(row(Arrays.asList(null, null))), 0), nullValue()); + assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(Arrays.asList(foo, bar), null))), 0), nullValue()); + assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(foo, null))), 0), nullValue()); + assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(null, null))), 0), nullValue()); assertThat( - toJavaObject(evaluator(expression).get().eval(row(Arrays.asList(Arrays.asList(foo, bar), 
Arrays.asList(delim, bar)))), 0), + toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(Arrays.asList(foo, bar), Arrays.asList(delim, bar)))), 0), + nullValue() + ); + assertThat( + toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(foo, Arrays.asList(delim, bar)))), 0), + nullValue() + ); + assertThat( + toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(null, Arrays.asList(delim, bar)))), 0), nullValue() ); - assertThat(toJavaObject(evaluator(expression).get().eval(row(Arrays.asList(foo, Arrays.asList(delim, bar)))), 0), nullValue()); - assertThat(toJavaObject(evaluator(expression).get().eval(row(Arrays.asList(null, Arrays.asList(delim, bar)))), 0), nullValue()); - assertThat(toJavaObject(evaluator(expression).get().eval(row(Arrays.asList(null, delim))), 0), nullValue()); + assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(null, delim))), 0), nullValue()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index cba09907f35b9..b2345e85336d4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -84,10 +85,10 @@ public void testCoalesceIsLazy() { Layout layout = builder.build(); 
assertThat(toJavaObject(exp.toEvaluator(child -> { if (child == evil) { - return () -> page -> { throw new AssertionError("shouldn't be called"); }; + return dvrCtx -> page -> { throw new AssertionError("shouldn't be called"); }; } return EvalMapper.toEvaluator(child, layout); - }).get().eval(row(testCase.getDataValues())), 0), testCase.getMatcher()); + }).get(new DriverContext()).eval(row(testCase.getDataValues())), 0), testCase.getMatcher()); } public void testCoalesceNullabilityIsUnknown() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java index 2b10c2c4f806e..7d5c18a5e3fd6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -98,7 +99,7 @@ public void testMany() { field("a", DataTypes.KEYWORD), IntStream.range(1, 5).mapToObj(i -> field(Integer.toString(i), DataTypes.KEYWORD)).toList() ) - ).get().eval(row(simpleData)), + ).get(new DriverContext()).eval(row(simpleData)), 0 ), equalTo(new BytesRef("cats and dogs")) @@ -120,7 +121,7 @@ public void testSomeConstant() { field("c", DataTypes.KEYWORD) ) ) - ).get().eval(row(simpleData)), + ).get(new DriverContext()).eval(row(simpleData)), 0 ), equalTo(new BytesRef("cats and dogs")) diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java index eb68509fdfafa..77807e6463324 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -198,7 +199,7 @@ public void testUnicode() { private String process(String str, int length) { Block result = evaluator( new Left(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, DataTypes.INTEGER)) - ).get().eval(row(List.of(new BytesRef(str)))); + ).get(new DriverContext()).eval(row(List.of(new BytesRef(str)))); if (null == result) { return null; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java index 540051d9ac8b8..39222386a0cb0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -200,7 +201,7 @@ public void testUnicode() { private String process(String str, int length) { Block result = evaluator( new Right(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, DataTypes.INTEGER)) - ).get().eval(row(List.of(new BytesRef(str)))); + ).get(new DriverContext()).eval(row(List.of(new BytesRef(str)))); if (null == result) { return null; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index aba167759d32a..fc426be21e3f6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -85,7 +86,7 @@ protected Expression build(Source source, List args) { public void testConstantDelimiter() { EvalOperator.ExpressionEvaluator eval = evaluator( new Split(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, new BytesRef(":"), DataTypes.KEYWORD)) - ).get(); + ).get(new DriverContext()); /* * 58 is ascii for : and appears in the toString below. 
We don't convert the delimiter to a * string because we aren't really sure it's printable. It could be a tab or a bell or some diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index 5742b97f8f3ff..19113ed65ffbe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -66,8 +67,9 @@ public Matcher resultsMatcher(List typedData public void testNoLengthToString() { assertThat( - evaluator(new Substring(Source.EMPTY, field("str", DataTypes.KEYWORD), field("start", DataTypes.INTEGER), null)).get() - .toString(), + evaluator(new Substring(Source.EMPTY, field("str", DataTypes.KEYWORD), field("start", DataTypes.INTEGER), null)).get( + new DriverContext() + ).toString(), equalTo("SubstringNoLengthEvaluator[str=Attribute[channel=0], start=Attribute[channel=1]]") ); } @@ -135,7 +137,7 @@ private String process(String str, int start, Integer length) { new Literal(Source.EMPTY, start, DataTypes.INTEGER), length == null ? null : new Literal(Source.EMPTY, length, DataTypes.INTEGER) ) - ).get().eval(row(List.of(new BytesRef(str)))); + ).get(new DriverContext()).eval(row(List.of(new BytesRef(str)))); return result == null ? 
null : ((BytesRef) toJavaObject(result, 0)).utf8ToString(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java index 8a4f8963cba96..0ac08b61ec39e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -92,7 +93,7 @@ public final void testApplyToAllTypes() { Source src = new Source(Location.EMPTY, lhsType.typeName() + " " + rhsType.typeName()); if (isRepresentable(lhsType) && isRepresentable(rhsType)) { op = build(src, field("lhs", lhsType), field("rhs", rhsType)); - result = toJavaObject(evaluator(op).get().eval(row(List.of(lhs.value(), rhs.value()))), 0); + result = toJavaObject(evaluator(op).get(new DriverContext()).eval(row(List.of(lhs.value(), rhs.value()))), 0); } else { op = build(src, lhs, rhs); result = op.fold(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index d5cd595ebc44a..f4de880bcd2b0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -171,7 +172,7 @@ public void testEdgeCases() { private Object process(Object val) { if (testCase.allTypesAreRepresentable()) { Neg neg = new Neg(Source.EMPTY, field("val", typeOf(val))); - return toJavaObject(evaluator(neg).get().eval(row(List.of(val))), 0); + return toJavaObject(evaluator(neg).get(new DriverContext()).eval(row(List.of(val))), 0); } else { // just fold if type is not representable Neg neg = new Neg(Source.EMPTY, new Literal(Source.EMPTY, val, typeOf(val))); return neg.fold(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java index f42c86d4b028a..34e6670862249 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.SerializationTestUtils; @@ -53,7 +54,6 @@ import java.util.Collections; import java.util.List; import java.util.Locale; -import java.util.function.Supplier; public class EvalMapperTests 
extends ESTestCase { private static final FieldAttribute DOUBLE1 = field("foo", DataTypes.DOUBLE); @@ -127,9 +127,9 @@ public void testEvaluatorSuppliers() { lb.append(LONG); Layout layout = lb.build(); - Supplier supplier = EvalMapper.toEvaluator(expression, layout); - EvalOperator.ExpressionEvaluator evaluator1 = supplier.get(); - EvalOperator.ExpressionEvaluator evaluator2 = supplier.get(); + var supplier = EvalMapper.toEvaluator(expression, layout); + EvalOperator.ExpressionEvaluator evaluator1 = supplier.get(new DriverContext()); + EvalOperator.ExpressionEvaluator evaluator2 = supplier.get(new DriverContext()); assertNotNull(evaluator1); assertNotNull(evaluator2); assertTrue(evaluator1 != evaluator2); From c84f20bfe2ba7e41dc011d62b6efe21a4237f8cb Mon Sep 17 00:00:00 2001 From: William Brafford Date: Wed, 13 Sep 2023 17:27:15 -0400 Subject: [PATCH 055/114] Default implementation for deprecated method (#99554) This is a followup to #99396. Downstream projects have switched to using the new method, but still override the deprecated method. Here, we give the deprecated method a default implementation so we can remove it, and remove the default implementation from the new method. Once this is merged, downstream projects will be able to remove their implementations of the deprecated method. 
--- .../plugins/ClusterCoordinationPlugin.java | 12 +++---- .../org/elasticsearch/node/NodeTests.java | 34 +++++-------------- 2 files changed, 15 insertions(+), 31 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/plugins/ClusterCoordinationPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ClusterCoordinationPlugin.java index 14305e1a8a04f..28f3a778c01a0 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ClusterCoordinationPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ClusterCoordinationPlugin.java @@ -79,22 +79,22 @@ CoordinationState.PersistedState createPersistedState( interface PersistedClusterStateServiceFactory { @Deprecated(forRemoval = true) - PersistedClusterStateService newPersistedClusterStateService( + default PersistedClusterStateService newPersistedClusterStateService( NodeEnvironment nodeEnvironment, NamedXContentRegistry xContentRegistry, ClusterSettings clusterSettings, ThreadPool threadPool - ); + ) { + throw new AssertionError("Should not be called!"); + } - default PersistedClusterStateService newPersistedClusterStateService( + PersistedClusterStateService newPersistedClusterStateService( NodeEnvironment nodeEnvironment, NamedXContentRegistry xContentRegistry, ClusterSettings clusterSettings, ThreadPool threadPool, CompatibilityVersions compatibilityVersions - ) { - return newPersistedClusterStateService(nodeEnvironment, xContentRegistry, clusterSettings, threadPool); - } + ); } interface ReconfiguratorFactory { diff --git a/server/src/test/java/org/elasticsearch/node/NodeTests.java b/server/src/test/java/org/elasticsearch/node/NodeTests.java index ce54b00cec9ef..0a0040eebea39 100644 --- a/server/src/test/java/org/elasticsearch/node/NodeTests.java +++ b/server/src/test/java/org/elasticsearch/node/NodeTests.java @@ -16,14 +16,12 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.allocation.AllocationService; import 
org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.core.RestApiVersion; @@ -635,33 +633,19 @@ private static class BaseTestClusterCoordinationPlugin extends Plugin implements @Override public Optional getPersistedClusterStateServiceFactory() { - return Optional.of(new PersistedClusterStateServiceFactory() { - @Override - public PersistedClusterStateService newPersistedClusterStateService( - NodeEnvironment nodeEnvironment, - NamedXContentRegistry xContentRegistry, - ClusterSettings clusterSettings, - ThreadPool threadPool - ) { - throw new AssertionError("not called"); - } - - @Override - public PersistedClusterStateService newPersistedClusterStateService( - NodeEnvironment nodeEnvironment, - NamedXContentRegistry namedXContentRegistry, - ClusterSettings clusterSettings, - ThreadPool threadPool, - CompatibilityVersions compatibilityVersions - ) { - return persistedClusterStateService = new PersistedClusterStateService( + return Optional.of( + ( + nodeEnvironment, + namedXContentRegistry, + clusterSettings, + threadPool, + compatibilityVersions) -> persistedClusterStateService = new PersistedClusterStateService( nodeEnvironment, namedXContentRegistry, clusterSettings, threadPool::relativeTimeInMillis - ); - } - }); + ) + ); } } From b2df3313fc191613dc37053cab1572c2f33b8e42 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 13 Sep 2023 20:48:09 -0700 
Subject: [PATCH 056/114] Make cat actions list extensible (#99504) This commit adds an internal extension for controlling which cat actions are returned by /_cat. --- server/src/main/java/module-info.java | 5 ++- .../elasticsearch/action/ActionModule.java | 13 +++++-- .../plugins/internal/RestExtension.java | 36 +++++++++++++++++++ 3 files changed, 51 insertions(+), 3 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/plugins/internal/RestExtension.java diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 1395aae41e2af..472707babf155 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -7,6 +7,7 @@ */ import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; +import org.elasticsearch.plugins.internal.RestExtension; /** The Elasticsearch Server Module. */ module org.elasticsearch.server { @@ -376,7 +377,8 @@ to org.elasticsearch.metering, org.elasticsearch.settings.secure, - org.elasticsearch.serverless.constants; + org.elasticsearch.serverless.constants, + org.elasticsearch.serverless.apifiltering; provides java.util.spi.CalendarDataProvider with org.elasticsearch.common.time.IsoCalendarDataProvider; provides org.elasticsearch.xcontent.ErrorOnUnknown with org.elasticsearch.common.xcontent.SuggestingErrorOnUnknown; @@ -392,6 +394,7 @@ uses org.elasticsearch.internal.VersionExtension; uses org.elasticsearch.internal.BuildExtension; uses org.elasticsearch.plugins.internal.SettingsExtension; + uses RestExtension; provides org.apache.lucene.codecs.PostingsFormat with diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 2304115614af8..5683ace63ba3b 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -306,6 +306,7 @@ import org.elasticsearch.plugins.ActionPlugin; 
import org.elasticsearch.plugins.ActionPlugin.ActionHandler; import org.elasticsearch.plugins.interceptor.RestServerActionPlugin; +import org.elasticsearch.plugins.internal.RestExtension; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.service.ReservedClusterStateService; import org.elasticsearch.rest.RestController; @@ -470,6 +471,7 @@ import java.util.Set; import java.util.function.Consumer; import java.util.function.Function; +import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.UnaryOperator; import java.util.stream.Collectors; @@ -849,10 +851,17 @@ private static ActionFilters setupActionFilters(List actionPlugins public void initRestHandlers(Supplier nodesInCluster) { List catActions = new ArrayList<>(); + var restExtension = RestExtension.load(() -> new RestExtension() { + @Override + public Predicate getCatActionsFilter() { + return action -> true; + } + }); + Predicate catActionsFilter = restExtension.getCatActionsFilter(); Consumer registerHandler = handler -> { if (shouldKeepRestHandler(handler)) { - if (handler instanceof AbstractCatAction) { - catActions.add((AbstractCatAction) handler); + if (handler instanceof AbstractCatAction catAction && catActionsFilter.test(catAction)) { + catActions.add(catAction); } restController.registerHandler(handler); } else { diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/RestExtension.java b/server/src/main/java/org/elasticsearch/plugins/internal/RestExtension.java new file mode 100644 index 0000000000000..da5de4f784a22 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/internal/RestExtension.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.plugins.internal; + +import org.elasticsearch.rest.action.cat.AbstractCatAction; + +import java.util.ServiceLoader; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public interface RestExtension { + /** + * Returns a filter that determines which cat actions are exposed in /_cat. + * + * The filter should return {@code true} if an action should be included, + * or {@code false} otherwise. + */ + Predicate getCatActionsFilter(); + + static RestExtension load(Supplier fallback) { + var loader = ServiceLoader.load(RestExtension.class); + var extensions = loader.stream().toList(); + if (extensions.size() > 1) { + throw new IllegalStateException("More than one rest extension found"); + } else if (extensions.size() == 0) { + return fallback.get(); + } + return extensions.get(0).get(); + } +} From 8726f1653fd5931b5c60a6fa866915327a624b1a Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 14 Sep 2023 10:09:51 +0100 Subject: [PATCH 057/114] Add IndexVersion to node info (#99515) This adds index_version field to node info, so we can read it in bwc tests to determine the index version we are upgrading from --- docs/changelog/99515.yaml | 5 +++++ docs/reference/cluster/nodes-info.asciidoc | 7 +++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../admin/cluster/node/info/NodeInfo.java | 19 +++++++++++++++++++ .../cluster/node/info/NodesInfoResponse.java | 1 + .../org/elasticsearch/node/NodeService.java | 2 ++ .../cluster/node/info/NodeInfoTests.java | 1 + .../remote/RemoteClusterNodesActionTests.java | 3 +++ .../cluster/stats/ClusterStatsNodesTests.java | 2 ++ .../ingest/ReservedPipelineActionTests.java | 2 ++ .../TransportVersionsFixupListenerTests.java | 1 + 
.../nodesinfo/NodeInfoStreamingTests.java | 3 +++ .../action/cat/RestPluginsActionTests.java | 2 ++ .../AutoscalingNodesInfoServiceTests.java | 2 ++ .../TransportNodeEnrollmentActionTests.java | 2 ++ ...InternalEnrollmentTokenGeneratorTests.java | 3 +++ 16 files changed, 56 insertions(+) create mode 100644 docs/changelog/99515.yaml diff --git a/docs/changelog/99515.yaml b/docs/changelog/99515.yaml new file mode 100644 index 0000000000000..7de237531a506 --- /dev/null +++ b/docs/changelog/99515.yaml @@ -0,0 +1,5 @@ +pr: 99515 +summary: Add `IndexVersion` to node info +area: Infra/REST API +type: enhancement +issues: [] diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc index e2dbffcd9705a..00650ce948b6f 100644 --- a/docs/reference/cluster/nodes-info.asciidoc +++ b/docs/reference/cluster/nodes-info.asciidoc @@ -136,6 +136,9 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id] `transport_version`:: The most recent transport version that this node can communicate with. +`index_version`:: + The most recent index version that this node can read. 
+ The `os` flag can be set to retrieve information that concern the operating system: @@ -232,6 +235,7 @@ The API returns the following response: "ip": "192.168.17", "version": "{version}", "transport_version": 100000298, + "index_version": 100000074, "build_flavor": "default", "build_type": "{build_type}", "build_hash": "587409e", @@ -271,6 +275,7 @@ The API returns the following response: // TESTRESPONSE[s/"host": "node-0.elastic.co"/"host": $body.$_path/] // TESTRESPONSE[s/"ip": "192.168.17"/"ip": $body.$_path/] // TESTRESPONSE[s/"transport_version": 100000298/"transport_version": $body.$_path/] +// TESTRESPONSE[s/"index_version": 100000074/"index_version": $body.$_path/] // TESTRESPONSE[s/"build_hash": "587409e"/"build_hash": $body.$_path/] // TESTRESPONSE[s/"roles": \[[^\]]*\]/"roles": $body.$_path/] // TESTRESPONSE[s/"attributes": \{[^\}]*\}/"attributes": $body.$_path/] @@ -305,6 +310,7 @@ The API returns the following response: "ip": "192.168.17", "version": "{version}", "transport_version": 100000298, + "index_version": 100000074, "build_flavor": "default", "build_type": "{build_type}", "build_hash": "587409e", @@ -368,6 +374,7 @@ The API returns the following response: // TESTRESPONSE[s/"host": "node-0.elastic.co"/"host": $body.$_path/] // TESTRESPONSE[s/"ip": "192.168.17"/"ip": $body.$_path/] // TESTRESPONSE[s/"transport_version": 100000298/"transport_version": $body.$_path/] +// TESTRESPONSE[s/"index_version": 100000074/"index_version": $body.$_path/] // TESTRESPONSE[s/"build_hash": "587409e"/"build_hash": $body.$_path/] // TESTRESPONSE[s/"roles": \[[^\]]*\]/"roles": $body.$_path/] // TESTRESPONSE[s/"attributes": \{[^\}]*\}/"attributes": $body.$_path/] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 844931dee9eb4..f1d5bc6f02a7c 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ 
b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -136,6 +136,7 @@ static TransportVersion def(int id) { public static final TransportVersion SHARD_SIZE_PRIMARY_TERM_GEN_ADDED = def(8_500_072); public static final TransportVersion COMPAT_VERSIONS_MAPPING_VERSION_ADDED = def(8_500_073); public static final TransportVersion V_8_500_074 = def(8_500_074); + public static final TransportVersion NODE_INFO_INDEX_VERSION_ADDED = def(8_500_075); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 35dc876b3a585..3086f22fae8bc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Nullable; import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.ingest.IngestInfo; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.os.OsInfo; @@ -41,6 +42,7 @@ public class NodeInfo extends BaseNodeResponse { private final Version version; private final TransportVersion transportVersion; + private final IndexVersion indexVersion; private final Build build; @Nullable @@ -64,6 +66,11 @@ public NodeInfo(StreamInput in) throws IOException { } else { transportVersion = TransportVersion.fromId(version.id); } + if (in.getTransportVersion().onOrAfter(TransportVersions.NODE_INFO_INDEX_VERSION_ADDED)) { + indexVersion = IndexVersion.readVersion(in); + } else { + indexVersion = IndexVersion.fromId(version.id); + } build = Build.readBuild(in); if (in.readBoolean()) { totalIndexingBuffer = 
ByteSizeValue.ofBytes(in.readLong()); @@ -94,6 +101,7 @@ public NodeInfo(StreamInput in) throws IOException { public NodeInfo( Version version, TransportVersion transportVersion, + IndexVersion indexVersion, Build build, DiscoveryNode node, @Nullable Settings settings, @@ -112,6 +120,7 @@ public NodeInfo( super(node); this.version = version; this.transportVersion = transportVersion; + this.indexVersion = indexVersion; this.build = build; this.settings = settings; addInfoIfNonNull(OsInfo.class, os); @@ -149,6 +158,13 @@ public TransportVersion getTransportVersion() { return transportVersion; } + /** + * The most recent index version that can be used by this node + */ + public IndexVersion getIndexVersion() { + return indexVersion; + } + /** * The build version of the node. */ @@ -200,6 +216,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { TransportVersion.writeVersion(transportVersion, out); } + if (out.getTransportVersion().onOrAfter(TransportVersions.NODE_INFO_INDEX_VERSION_ADDED)) { + IndexVersion.writeVersion(indexVersion, out); + } Build.writeBuild(build, out); if (totalIndexingBuffer == null) { out.writeBoolean(false); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java index 2b97ee38daa9f..68769af5a17d9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -65,6 +65,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("version", nodeInfo.getVersion()); builder.field("transport_version", nodeInfo.getTransportVersion().id()); + builder.field("index_version", nodeInfo.getIndexVersion().id()); builder.field("build_flavor", 
nodeInfo.getBuild().flavor()); builder.field("build_type", nodeInfo.getBuild().type().displayName()); builder.field("build_hash", nodeInfo.getBuild().hash()); diff --git a/server/src/main/java/org/elasticsearch/node/NodeService.java b/server/src/main/java/org/elasticsearch/node/NodeService.java index 1bb039ecfbe26..f71fada3c046a 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeService.java +++ b/server/src/main/java/org/elasticsearch/node/NodeService.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -114,6 +115,7 @@ public NodeInfo info( return new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), Build.current(), transportService.getLocalNode(), settings ? 
settingsFilter.filter(this.settings) : null, diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfoTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfoTests.java index 1aa2eb16c0309..35f17c24b3e89 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfoTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfoTests.java @@ -38,6 +38,7 @@ public void testGetInfo() { NodeInfo nodeInfo = new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), Build.current(), DiscoveryNodeUtils.builder("test_node") .roles(emptySet()) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesActionTests.java index c75d79f1ea18e..863a57a42c2bc 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesActionTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -78,6 +79,7 @@ public void testDoExecuteForRemoteServerNodes() { new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), null, node, null, @@ -147,6 +149,7 @@ public void testDoExecuteForRemoteNodes() { new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), null, node, null, diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java index 060683818b9a7..80c320493f4ac 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.stats.IndexingPressureStats; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.test.ESTestCase; @@ -323,6 +324,7 @@ private static NodeInfo createNodeInfo(String nodeId, String transportType, Stri return new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), Build.current(), DiscoveryNodeUtils.create(nodeId, buildNewFakeTransportAddress()), settings.build(), diff --git a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java index 520d60a4dfbf6..05b35f992564f 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.ingest.FakeProcessor; import org.elasticsearch.ingest.IngestInfo; import org.elasticsearch.ingest.IngestService; @@ -101,6 +102,7 @@ public void setup() { NodeInfo nodeInfo = new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), Build.current(), discoveryNode, Settings.EMPTY, diff --git 
a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java index f213d7e366ce4..3c8540c7771c6 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java @@ -86,6 +86,7 @@ private static NodesInfoResponse getResponse(Map respo null, e.getValue(), null, + null, DiscoveryNodeUtils.create(e.getKey(), new TransportAddress(TransportAddress.META_ADDRESS, 9200)), null, null, diff --git a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index f48cf67bf5466..8a338333065d7 100644 --- a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -73,6 +73,8 @@ private void assertExpectedUnchanged(NodeInfo nodeInfo, NodeInfo readNodeInfo) t assertThat(nodeInfo.getBuild().toString(), equalTo(readNodeInfo.getBuild().toString())); assertThat(nodeInfo.getHostname(), equalTo(readNodeInfo.getHostname())); assertThat(nodeInfo.getVersion(), equalTo(readNodeInfo.getVersion())); + assertThat(nodeInfo.getTransportVersion(), equalTo(readNodeInfo.getTransportVersion())); + assertThat(nodeInfo.getIndexVersion(), equalTo(readNodeInfo.getIndexVersion())); compareJsonOutput(nodeInfo.getInfo(HttpInfo.class), readNodeInfo.getInfo(HttpInfo.class)); compareJsonOutput(nodeInfo.getInfo(RemoteClusterServerInfo.class), readNodeInfo.getInfo(RemoteClusterServerInfo.class)); compareJsonOutput(nodeInfo.getInfo(JvmInfo.class), readNodeInfo.getInfo(JvmInfo.class)); @@ -230,6 +232,7 @@ private static NodeInfo createNodeInfo() { return new NodeInfo( VersionUtils.randomVersion(random()), 
TransportVersionUtils.randomVersion(random()), + IndexVersionUtils.randomVersion(random()), build, node, settings, diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestPluginsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestPluginsActionTests.java index 5d26036769a35..5249fe6aec9f0 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestPluginsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestPluginsActionTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Table; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.plugins.PluginDescriptor; import org.elasticsearch.plugins.PluginRuntimeInfo; import org.elasticsearch.rest.RestRequest; @@ -64,6 +65,7 @@ private Table buildTable(List pluginDescriptor) { new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), null, node(i), null, diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/nodeinfo/AutoscalingNodesInfoServiceTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/nodeinfo/AutoscalingNodesInfoServiceTests.java index c0c7de657eafe..673b05c7c2466 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/nodeinfo/AutoscalingNodesInfoServiceTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/nodeinfo/AutoscalingNodesInfoServiceTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.unit.Processors; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.monitor.os.OsInfo; import org.elasticsearch.monitor.os.OsStats; import org.elasticsearch.test.client.NoOpClient; @@ -450,6 
+451,7 @@ private static org.elasticsearch.action.admin.cluster.node.info.NodeInfo infoFor return new org.elasticsearch.action.admin.cluster.node.info.NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), Build.current(), node, null, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java index 6e31d55c81d29..6144ae74c7692 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportNodeEnrollmentActionTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -103,6 +104,7 @@ public void testDoExecute() throws Exception { new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), null, n, null, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java index 448265f7efcf4..02b088e245120 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGeneratorTests.java @@ -26,6 +26,7 @@ import 
org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.env.Environment; import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -231,6 +232,7 @@ public Answer answerNullHttpInfo(InvocationOnMock invocationO new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), null, DiscoveryNodeUtils.builder("1").name("node-name").roles(Set.of()).build(), null, @@ -264,6 +266,7 @@ private Answer answerWithInfo(InvocationOnMock invocationOnMo new NodeInfo( Version.CURRENT, TransportVersion.current(), + IndexVersion.current(), null, DiscoveryNodeUtils.builder("1").name("node-name").roles(Set.of()).build(), null, From 154dc9ac576035727b6f21586ce0a81091eaee99 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Thu, 14 Sep 2023 12:40:15 +0200 Subject: [PATCH 058/114] Make tsdb settings public in Serverless (#99567) * fix: make tsdb settings public in serverless Creation of time series indices fails in serverless because some tsdb specific settings are not public. This results in errors when creating time series indices whose settings include: * index.mode * index.time_series.start_time * index.time_series.end_time * index.look_ahead_time * index.look_back_time * index.routing_path Here we make them public so that users can create time series indices. 
* Update docs/changelog/99567.yaml --- docs/changelog/99567.yaml | 6 ++++++ .../org/elasticsearch/datastreams/DataStreamsPlugin.java | 6 ++++-- .../elasticsearch/cluster/metadata/IndexMetadata.java | 3 ++- .../main/java/org/elasticsearch/index/IndexSettings.java | 9 ++++++--- 4 files changed, 18 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/99567.yaml diff --git a/docs/changelog/99567.yaml b/docs/changelog/99567.yaml new file mode 100644 index 0000000000000..aea65e55b6ee2 --- /dev/null +++ b/docs/changelog/99567.yaml @@ -0,0 +1,6 @@ +pr: 99567 +summary: Make tsdb settings public in Serverless +area: TSDB +type: bug +issues: + - 99563 diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index cd221ada7a4dc..313a6dd459668 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -102,7 +102,8 @@ public class DataStreamsPlugin extends Plugin implements ActionPlugin { TimeValue.timeValueMinutes(1), TimeValue.timeValueDays(7), Setting.Property.IndexScope, - Setting.Property.Dynamic + Setting.Property.Dynamic, + Setting.Property.ServerlessPublic ); public static final String LIFECYCLE_CUSTOM_INDEX_METADATA_KEY = "data_stream_lifecycle"; public static final Setting LOOK_BACK_TIME = Setting.timeSetting( @@ -111,7 +112,8 @@ public class DataStreamsPlugin extends Plugin implements ActionPlugin { TimeValue.timeValueMinutes(1), TimeValue.timeValueDays(7), Setting.Property.IndexScope, - Setting.Property.Dynamic + Setting.Property.Dynamic, + Setting.Property.ServerlessPublic ); // The dependency of index.look_ahead_time is a cluster setting and currently there is no clean validation approach for this: private final SetOnce updateTimeSeriesRangeService = new SetOnce<>(); diff --git 
a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index ab67478192c11..9709e149b28d1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -493,7 +493,8 @@ public Iterator> settings() { public static final Setting> INDEX_ROUTING_PATH = Setting.stringListSetting( "index.routing_path", Setting.Property.IndexScope, - Setting.Property.Final + Setting.Property.Final, + Property.ServerlessPublic ); /** diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 3a2b01f5cc9c4..d887ed8d1531d 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -590,7 +590,8 @@ public Iterator> settings() { Instant.ofEpochMilli(DateUtils.MAX_MILLIS_BEFORE_MINUS_9999), v -> {}, Property.IndexScope, - Property.Final + Property.Final, + Property.ServerlessPublic ); /** @@ -619,7 +620,8 @@ public Iterator> settings() { } }, Property.IndexScope, - Property.Dynamic + Property.Dynamic, + Property.ServerlessPublic ); public static final Setting TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING = Setting.boolSetting( @@ -658,7 +660,8 @@ public Iterator> settings() { } }, Property.IndexScope, - Property.Final + Property.Final, + Property.ServerlessPublic ); /** From ff67aaca69bf8bd34d1b96dda0e9c33a7ecc1f9f Mon Sep 17 00:00:00 2001 From: Ed Savage Date: Thu, 14 Sep 2023 13:12:46 +0100 Subject: [PATCH 059/114] [ML] Fix failing JobUpdateTests.testMergeWithJob test (#99571) Ensure that a random compatible _MlConfigVersion_ is used to set the job version. 
--- .../xpack/core/ml/job/config/JobUpdateTests.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java index 78a89877cbfef..543360fc24d89 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java @@ -7,14 +7,12 @@ package org.elasticsearch.xpack.core.ml.job.config; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.AbstractXContentSerializingTestCase; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; @@ -294,7 +292,7 @@ public void testMergeWithJob() { updateBuilder.setPerPartitionCategorizationConfig(new PerPartitionCategorizationConfig(true, randomBoolean())); updateBuilder.setCustomSettings(customSettings); updateBuilder.setModelSnapshotId(randomAlphaOfLength(10)); - updateBuilder.setJobVersion(MlConfigVersion.fromVersion(VersionUtils.randomCompatibleVersion(random(), Version.CURRENT))); + updateBuilder.setJobVersion(MlConfigVersionUtils.randomCompatibleVersion(random())); updateBuilder.setModelPruneWindow(TimeValue.timeValueDays(randomIntBetween(1, 100))); JobUpdate update = updateBuilder.build(); From f0a6596c177bd08c7d049e1c20ffeea01b66edfc Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 14 Sep 2023 09:35:40 -0400 
Subject: [PATCH 060/114] ESQL: Update status every second or so (#99556) This changes `Driver` to report status after at least a second has passed. Between each iteration we check if a second has passed since we last reported status. If more than a second has passed we'll report status. That second is a pragma that can be overridden. And we override it in the EsqlActionTaskIT so we can test the status again. --- .../compute/lucene/LuceneOperator.java | 9 +- .../compute/lucene/LuceneSourceOperator.java | 5 + .../lucene/LuceneTopNSourceOperator.java | 6 + .../compute/operator/Driver.java | 30 ++++- .../compute/operator/DriverStatus.java | 2 + .../LuceneSourceOperatorStatusTests.java | 20 +-- .../compute/operator/OperatorTestCase.java | 1 + .../exchange/ExchangeServiceTests.java | 22 +++- .../xpack/esql/action/EsqlActionTaskIT.java | 122 +++++++++++------- .../esql/enrich/EnrichLookupService.java | 1 + .../esql/planner/LocalExecutionPlanner.java | 14 +- .../xpack/esql/plugin/QueryPragmas.java | 17 +++ 12 files changed, 176 insertions(+), 73 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index e0b25469d5ab2..ec1e13d033a8b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -152,11 +152,14 @@ SearchContext searchContext() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append(this.getClass().getSimpleName()).append("["); - sb.append(", maxPageSize=").append(maxPageSize); + sb.append("maxPageSize=").append(maxPageSize); + describe(sb); sb.append("]"); return sb.toString(); } + protected abstract void describe(StringBuilder sb); + @Override public Operator.Status status() { return new Status(this); @@ -234,11 +237,11 @@ public 
int pagesEmitted() { return pagesEmitted; } - public int leafPosition() { + public int slicePosition() { return slicePosition; } - public int leafSize() { + public int sliceSize() { return sliceSize; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index 540cee388efc9..0bbb6571dc4fd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -160,4 +160,9 @@ public Page getOutput() { throw new UncheckedIOException(e); } } + + @Override + protected void describe(StringBuilder sb) { + sb.append(", remainingDocs=").append(remainingDocs); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 48389d31e08be..4c6bb50ce9f7f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -212,6 +212,12 @@ private Page emit(boolean startEmitting) { ); } + @Override + protected void describe(StringBuilder sb) { + sb.append(", limit=").append(limit); + sb.append(", sorts=").append(sorts); + } + static final class PerShardCollector { private final int shardIndex; private final SearchContext searchContext; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 4808094e116be..5f5cbe2707fe3 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -40,15 +40,25 @@ public class Driver implements Releasable, Describable { public static final TimeValue DEFAULT_TIME_BEFORE_YIELDING = TimeValue.timeValueMinutes(5); public static final int DEFAULT_MAX_ITERATIONS = 10_000; + /** + * Minimum time between updating status. + */ + public static final TimeValue DEFAULT_STATUS_INTERVAL = TimeValue.timeValueSeconds(1); private final String sessionId; private final DriverContext driverContext; private final Supplier description; private final List activeOperators; private final Releasable releasable; + private final long statusNanos; private final AtomicReference cancelReason = new AtomicReference<>(); private final AtomicReference> blocked = new AtomicReference<>(); + /** + * Status reported to the tasks API. We write the status at most once every + * {@link #statusNanos}, as soon as loop has finished and after {@link #statusNanos} + * have passed. 
+ */ private final AtomicReference status; /** @@ -58,6 +68,7 @@ public class Driver implements Releasable, Describable { * @param source source operator * @param intermediateOperators the chain of operators to execute * @param sink sink operator + * @param statusInterval minimum status reporting interval * @param releasable a {@link Releasable} to invoked once the chain of operators has run to completion */ public Driver( @@ -67,6 +78,7 @@ public Driver( SourceOperator source, List intermediateOperators, SinkOperator sink, + TimeValue statusInterval, Releasable releasable ) { this.sessionId = sessionId; @@ -76,6 +88,7 @@ public Driver( this.activeOperators.add(source); this.activeOperators.addAll(intermediateOperators); this.activeOperators.add(sink); + this.statusNanos = statusInterval.nanos(); this.releasable = releasable; this.status = new AtomicReference<>(new DriverStatus(sessionId, System.currentTimeMillis(), DriverStatus.Status.QUEUED, List.of())); } @@ -95,7 +108,7 @@ public Driver( SinkOperator sink, Releasable releasable ) { - this("unset", driverContext, () -> null, source, intermediateOperators, sink, releasable); + this("unset", driverContext, () -> null, source, intermediateOperators, sink, DEFAULT_STATUS_INTERVAL, releasable); } public DriverContext driverContext() { @@ -110,26 +123,33 @@ public DriverContext driverContext() { private ListenableActionFuture run(TimeValue maxTime, int maxIterations) { long maxTimeNanos = maxTime.nanos(); long startTime = System.nanoTime(); + long nextStatus = startTime + statusNanos; int iter = 0; while (isFinished() == false) { ListenableActionFuture fut = runSingleLoopIteration(); if (fut.isDone() == false) { + status.set(updateStatus(DriverStatus.Status.ASYNC)); return fut; } - if (++iter >= maxIterations) { + if (iter >= maxIterations) { break; } long now = System.nanoTime(); + if (now > nextStatus) { + status.set(updateStatus(DriverStatus.Status.RUNNING)); + nextStatus = now + statusNanos; + } + iter++; if (now 
- startTime > maxTimeNanos) { break; } } if (isFinished()) { - status.set(updateStatus(DriverStatus.Status.DONE)); // Report status for the tasks API + status.set(updateStatus(DriverStatus.Status.DONE)); driverContext.finish(); releasable.close(); } else { - status.set(updateStatus(DriverStatus.Status.RUNNING)); // Report status for the tasks API + status.set(updateStatus(DriverStatus.Status.WAITING)); } return Operator.NOT_BLOCKED; } @@ -227,7 +247,7 @@ private void ensureNotCancelled() { } public static void start(Executor executor, Driver driver, int maxIterations, ActionListener listener) { - driver.status.set(driver.updateStatus(DriverStatus.Status.STARTING)); // Report status for the tasks API + driver.status.set(driver.updateStatus(DriverStatus.Status.STARTING)); schedule(DEFAULT_TIME_BEFORE_YIELDING, maxIterations, executor, driver, listener); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java index 1a33bbbb9ff3a..b3326e395def2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java @@ -210,6 +210,8 @@ public enum Status implements ToXContentFragment { QUEUED, STARTING, RUNNING, + ASYNC, + WAITING, DONE; @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java index be6a4a3cd19fb..60d5dd394afb7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java @@ -51,36 +51,36 @@ protected 
LuceneSourceOperator.Status mutateInstance(LuceneSourceOperator.Status randomValueOtherThan(instance.currentLeaf(), ESTestCase::randomNonNegativeInt), instance.totalLeaves(), instance.pagesEmitted(), - instance.leafPosition(), - instance.leafSize() + instance.slicePosition(), + instance.sliceSize() ); case 1 -> new LuceneSourceOperator.Status( instance.currentLeaf(), randomValueOtherThan(instance.totalLeaves(), ESTestCase::randomNonNegativeInt), instance.pagesEmitted(), - instance.leafPosition(), - instance.leafSize() + instance.slicePosition(), + instance.sliceSize() ); case 2 -> new LuceneSourceOperator.Status( instance.currentLeaf(), instance.totalLeaves(), randomValueOtherThan(instance.pagesEmitted(), ESTestCase::randomNonNegativeInt), - instance.leafPosition(), - instance.leafSize() + instance.slicePosition(), + instance.sliceSize() ); case 3 -> new LuceneSourceOperator.Status( instance.currentLeaf(), instance.totalLeaves(), instance.pagesEmitted(), - randomValueOtherThan(instance.leafPosition(), ESTestCase::randomNonNegativeInt), - instance.leafSize() + randomValueOtherThan(instance.slicePosition(), ESTestCase::randomNonNegativeInt), + instance.sliceSize() ); case 4 -> new LuceneSourceOperator.Status( instance.currentLeaf(), instance.totalLeaves(), instance.pagesEmitted(), - instance.leafPosition(), - randomValueOtherThan(instance.leafSize(), ESTestCase::randomNonNegativeInt) + instance.slicePosition(), + randomValueOtherThan(instance.sliceSize(), ESTestCase::randomNonNegativeInt) ); default -> throw new UnsupportedOperationException(); }; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 0f47b5c29cc3b..e08dfedd58811 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -171,6 +171,7 @@ public static void runDriver(List drivers) { new SequenceLongBlockSourceOperator(LongStream.range(0, between(1, 100)), between(1, 100)), List.of(), new PageConsumerOperator(page -> {}), + Driver.DEFAULT_STATUS_INTERVAL, () -> {} ) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 97c43920f54d0..ec9fbac2c0368 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -268,14 +268,32 @@ void runConcurrentTest( String description = "sink-" + i; ExchangeSinkOperator sinkOperator = new ExchangeSinkOperator(exchangeSink.get(), Function.identity()); DriverContext dc = new DriverContext(); - Driver d = new Driver("test-session:1", dc, () -> description, seqNoGenerator.get(dc), List.of(), sinkOperator, () -> {}); + Driver d = new Driver( + "test-session:1", + dc, + () -> description, + seqNoGenerator.get(dc), + List.of(), + sinkOperator, + Driver.DEFAULT_STATUS_INTERVAL, + () -> {} + ); drivers.add(d); } for (int i = 0; i < numSources; i++) { String description = "source-" + i; ExchangeSourceOperator sourceOperator = new ExchangeSourceOperator(exchangeSource.get()); DriverContext dc = new DriverContext(); - Driver d = new Driver("test-session:2", dc, () -> description, sourceOperator, List.of(), seqNoCollector.get(dc), () -> {}); + Driver d = new Driver( + "test-session:2", + dc, + () -> description, + sourceOperator, + List.of(), + seqNoCollector.get(dc), + Driver.DEFAULT_STATUS_INTERVAL, + () -> {} + ); drivers.add(d); } PlainActionFuture future = new PlainActionFuture<>(); diff 
--git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index a5931b15f76a9..cae86e44c69d0 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.compute.lucene.LuceneSourceOperator; @@ -112,54 +113,59 @@ public void setupIndex() throws IOException { bulk.get(); } - @AwaitsFix(bugUrl = "the task status is only updated after max_iterations") public void testTaskContents() throws Exception { ActionFuture response = startEsql(); - getTasksStarting(); - List foundTasks = getTasksRunning(); - int luceneSources = 0; - int valuesSourceReaders = 0; - int exchangeSources = 0; - int exchangeSinks = 0; - for (TaskInfo task : foundTasks) { - DriverStatus status = (DriverStatus) task.status(); - assertThat(status.sessionId(), not(emptyOrNullString())); - for (DriverStatus.OperatorStatus o : status.activeOperators()) { - if (o.operator().equals("LuceneSourceOperator[shardId=0, maxPageSize=" + PAGE_SIZE + "]")) { - LuceneSourceOperator.Status oStatus = (LuceneSourceOperator.Status) o.status(); - assertThat(oStatus.currentLeaf(), lessThanOrEqualTo(oStatus.totalLeaves())); - assertThat(oStatus.leafPosition(), lessThanOrEqualTo(oStatus.leafSize())); - luceneSources++; - continue; - } - if (o.operator().equals("ValuesSourceReaderOperator[field = pause_me]")) { 
- ValuesSourceReaderOperator.Status oStatus = (ValuesSourceReaderOperator.Status) o.status(); - assertThat(oStatus.readersBuilt(), equalTo(Map.of("LongValuesReader", 1))); - assertThat(oStatus.pagesProcessed(), greaterThanOrEqualTo(1)); - valuesSourceReaders++; - continue; - } - if (o.operator().equals("ExchangeSourceOperator")) { - ExchangeSourceOperator.Status oStatus = (ExchangeSourceOperator.Status) o.status(); - assertThat(oStatus.pagesWaiting(), greaterThanOrEqualTo(0)); - assertThat(oStatus.pagesEmitted(), greaterThanOrEqualTo(0)); - exchangeSources++; - continue; - } - if (o.operator().equals("ExchangeSinkOperator")) { - ExchangeSinkOperator.Status oStatus = (ExchangeSinkOperator.Status) o.status(); - assertThat(oStatus.pagesAccepted(), greaterThanOrEqualTo(0)); - exchangeSinks++; + try { + getTasksStarting(); + scriptPermits.release(PAGE_SIZE); + List foundTasks = getTasksRunning(); + int luceneSources = 0; + int valuesSourceReaders = 0; + int exchangeSources = 0; + int exchangeSinks = 0; + for (TaskInfo task : foundTasks) { + DriverStatus status = (DriverStatus) task.status(); + assertThat(status.sessionId(), not(emptyOrNullString())); + for (DriverStatus.OperatorStatus o : status.activeOperators()) { + if (o.operator().startsWith("LuceneSourceOperator[maxPageSize=" + PAGE_SIZE)) { + LuceneSourceOperator.Status oStatus = (LuceneSourceOperator.Status) o.status(); + assertThat(oStatus.currentLeaf(), lessThanOrEqualTo(oStatus.totalLeaves())); + assertThat(oStatus.slicePosition(), greaterThanOrEqualTo(0)); + if (oStatus.sliceSize() != 0) { + assertThat(oStatus.slicePosition(), lessThanOrEqualTo(oStatus.sliceSize())); + } + luceneSources++; + continue; + } + if (o.operator().equals("ValuesSourceReaderOperator[field = pause_me]")) { + ValuesSourceReaderOperator.Status oStatus = (ValuesSourceReaderOperator.Status) o.status(); + assertThat(oStatus.readersBuilt(), equalTo(Map.of("LongValuesReader", 1))); + assertThat(oStatus.pagesProcessed(), 
greaterThanOrEqualTo(1)); + valuesSourceReaders++; + continue; + } + if (o.operator().equals("ExchangeSourceOperator")) { + ExchangeSourceOperator.Status oStatus = (ExchangeSourceOperator.Status) o.status(); + assertThat(oStatus.pagesWaiting(), greaterThanOrEqualTo(0)); + assertThat(oStatus.pagesEmitted(), greaterThanOrEqualTo(0)); + exchangeSources++; + continue; + } + if (o.operator().equals("ExchangeSinkOperator")) { + ExchangeSinkOperator.Status oStatus = (ExchangeSinkOperator.Status) o.status(); + assertThat(oStatus.pagesAccepted(), greaterThanOrEqualTo(0)); + exchangeSinks++; + } } } + assertThat(luceneSources, greaterThanOrEqualTo(1)); + assertThat(valuesSourceReaders, equalTo(1)); + assertThat(exchangeSinks, greaterThanOrEqualTo(1)); + assertThat(exchangeSources, equalTo(1)); + } finally { + scriptPermits.release(Integer.MAX_VALUE); + assertThat(Iterators.flatMap(response.get().values(), i -> i).next(), equalTo((long) NUM_DOCS)); } - assertThat(luceneSources, greaterThanOrEqualTo(1)); - assertThat(valuesSourceReaders, equalTo(1)); - assertThat(exchangeSinks, greaterThanOrEqualTo(1)); - assertThat(exchangeSources, equalTo(1)); - - scriptPermits.release(Integer.MAX_VALUE); - assertThat(response.get().values(), equalTo(List.of(List.of((long) NUM_DOCS)))); } public void testCancelRead() throws Exception { @@ -194,8 +200,17 @@ public void testCancelEsqlTask() throws Exception { private ActionFuture startEsql() { scriptPermits.drainPermits(); - scriptPermits.release(between(1, 10)); - var pragmas = new QueryPragmas(Settings.builder().put("data_partitioning", "shard").put("page_size", PAGE_SIZE).build()); + scriptPermits.release(between(1, 5)); + var pragmas = new QueryPragmas( + Settings.builder() + // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too. + .put("data_partitioning", "shard") + // Limit the page size to something small so we do more than one page worth of work, so we get more status updates. 
+ .put("page_size", PAGE_SIZE) + // Report the status after every action + .put("status_interval", "0ms") + .build() + ); return new EsqlQueryRequestBuilder(client(), EsqlQueryAction.INSTANCE).query("from test | stats sum(pause_me)") .pragmas(pragmas) .execute(); @@ -233,7 +248,13 @@ private List getTasksStarting() throws Exception { assertThat(task.description(), either(equalTo(READ_DESCRIPTION)).or(equalTo(MERGE_DESCRIPTION))); DriverStatus status = (DriverStatus) task.status(); logger.info("{}", status.status()); - assertThat(status.status(), equalTo(DriverStatus.Status.STARTING)); + /* + * Accept tasks that are either starting or have gone + * immediately async. The coordinating task is likely + * to have done the latter and the reading task should + * have done the former. + */ + assertThat(status.status(), either(equalTo(DriverStatus.Status.STARTING)).or(equalTo(DriverStatus.Status.ASYNC))); } foundTasks.addAll(tasks); }); @@ -256,10 +277,13 @@ private List getTasksRunning() throws Exception { assertThat(tasks, hasSize(equalTo(2))); for (TaskInfo task : tasks) { assertThat(task.action(), equalTo(DriverTaskRunner.ACTION_NAME)); - assertThat(task.description(), either(equalTo(READ_DESCRIPTION)).or(equalTo(MERGE_DESCRIPTION))); DriverStatus status = (DriverStatus) task.status(); - // TODO: Running is not after one iteration? 
- assertThat(status.status(), equalTo(DriverStatus.Status.STARTING)); + assertThat(task.description(), either(equalTo(READ_DESCRIPTION)).or(equalTo(MERGE_DESCRIPTION))); + if (task.description().equals(READ_DESCRIPTION)) { + assertThat(status.status(), equalTo(DriverStatus.Status.RUNNING)); + } else { + assertThat(status.status(), equalTo(DriverStatus.Status.ASYNC)); + } } foundTasks.addAll(tasks); }); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 4d783b9a1012c..2d1480a947d25 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -205,6 +205,7 @@ private void doLookup( queryOperator, intermediateOperators, outputOperator, + Driver.DEFAULT_STATUS_INTERVAL, searchContext ); task.addListener(() -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index cd653c64213c3..d5b8b6df1db8c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -41,6 +41,7 @@ import org.elasticsearch.compute.operator.topn.TopNOperator; import org.elasticsearch.compute.operator.topn.TopNOperator.TopNOperatorFactory; import org.elasticsearch.core.Releasables; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; @@ -149,7 +150,10 @@ public LocalExecutionPlan plan(PhysicalPlan node) { PhysicalOperation 
physicalOperation = plan(node, context); context.addDriverFactory( - new DriverFactory(new DriverSupplier(context.bigArrays, physicalOperation), context.driverParallelism().get()) + new DriverFactory( + new DriverSupplier(context.bigArrays, physicalOperation, configuration.pragmas().statusInterval()), + context.driverParallelism().get() + ) ); return new LocalExecutionPlan(context.driverFactories); @@ -679,8 +683,10 @@ int pageSize(Integer estimatedRowSize) { } } - record DriverSupplier(BigArrays bigArrays, PhysicalOperation physicalOperation) implements Function, Describable { - + record DriverSupplier(BigArrays bigArrays, PhysicalOperation physicalOperation, TimeValue statusInterval) + implements + Function, + Describable { @Override public Driver apply(String sessionId) { SourceOperator source = null; @@ -693,7 +699,7 @@ public Driver apply(String sessionId) { physicalOperation.operators(operators, driverContext); sink = physicalOperation.sink(driverContext); success = true; - return new Driver(sessionId, driverContext, physicalOperation::describe, source, operators, sink, () -> {}); + return new Driver(sessionId, driverContext, physicalOperation::describe, source, operators, sink, statusInterval, () -> {}); } finally { if (false == success) { Releasables.close(source, () -> Releasables.close(operators), sink); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java index bb1f669dc2b43..602e04ff08f6c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java @@ -14,6 +14,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.lucene.DataPartitioning; +import org.elasticsearch.compute.operator.Driver; +import 
org.elasticsearch.compute.operator.DriverStatus; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -42,6 +45,12 @@ public final class QueryPragmas implements Writeable { */ public static final Setting PAGE_SIZE = Setting.intSetting("page_size", 0, 0); + /** + * The minimum interval between syncs of the {@link DriverStatus}, making + * the status available to task API. + */ + public static final Setting STATUS_INTERVAL = Setting.timeSetting("status_interval", Driver.DEFAULT_STATUS_INTERVAL); + public static final QueryPragmas EMPTY = new QueryPragmas(Settings.EMPTY); private final Settings settings; @@ -87,6 +96,14 @@ public int pageSize() { return PAGE_SIZE.get(settings); } + /** + * The minimum interval between syncs of the {@link DriverStatus}, making + * the status available to task API. + */ + public TimeValue statusInterval() { + return STATUS_INTERVAL.get(settings); + } + public boolean isEmpty() { return settings.isEmpty(); } From b5ea3560f19c94015aa06d9e9e421c697d38216e Mon Sep 17 00:00:00 2001 From: John Verwolf Date: Thu, 14 Sep 2023 07:18:02 -0700 Subject: [PATCH 061/114] Forward port 8.10.0 docs (#99505) --- .../reference/migration/migrate_8_10.asciidoc | 78 +++++- docs/reference/release-notes/8.10.0.asciidoc | 251 +++++++++++++++++- 2 files changed, 323 insertions(+), 6 deletions(-) diff --git a/docs/reference/migration/migrate_8_10.asciidoc b/docs/reference/migration/migrate_8_10.asciidoc index 84600fd96bd69..a1d132812ad03 100644 --- a/docs/reference/migration/migrate_8_10.asciidoc +++ b/docs/reference/migration/migrate_8_10.asciidoc @@ -9,11 +9,81 @@ your application to {es} 8.10. See also <> and <>. -coming::[8.10.0] - - [discrete] [[breaking-changes-8.10]] === Breaking changes -There are no breaking changes in {es} 8.10. +The following changes in {es} 8.10 might affect your applications +and prevent them from operating normally. 
+Before upgrading to 8.10, review these changes and take the described steps +to mitigate the impact. + + +There are no notable breaking changes in {es} 8.10. +But there are some less critical breaking changes. + +[discrete] +[[breaking_810_cluster_and_node_setting_changes]] +==== Cluster and node setting changes + +[[remove_unused_executor_builder_for_vector_tile_plugin]] +.Remove the unused executor builder for vector tile plugin +[%collapsible] +==== +*Details* + +The threadpool called `vectortile` is a left over from the original development of the vector tile search end point and it is used nowhere. It can still be a breaking change if it is configured on the elasticsearch yml file, for example by changing the threadpool size `thread_pool.vectortile.size=8`' + +*Impact* + +In the case the threadpool appears on the yaml file, Elasticsearch will not start until those lines are removed. +==== + +[discrete] +[[breaking_810_java_api_changes]] +==== Java API changes + +[[change_pre_configured_cached_analyzer_components_to_use_indexversion_instead_of_version-highlight]] +.Change pre-configured and cached analyzer components to use IndexVersion instead of Version +[%collapsible] +==== +*Details* + +This PR changes the types used to obtain pre-configured components from Version to IndexVersion, +with corresponding changes to method names. + +Prior to 8.10, there is a one-to-one mapping between node version and index version, with corresponding constants +in the IndexVersion class. +Starting in 8.10, IndexVersion is versioned independently of node version, and will be a simple incrementing number. +For more information on how to use IndexVersion and other version types, please see the contributing guide. + +*Impact* + +Analysis components now take IndexVersion instead of Version +==== + + +[discrete] +[[deprecated-8.10]] +=== Deprecations + +The following functionality has been deprecated in {es} 8.10 +and will be removed in a future version. 
+While this won't have an immediate impact on your applications, +we strongly encourage you to take the described steps to update your code +after upgrading to 8.10. + +To find out if you are using any deprecated functionality, +enable <>. + +[discrete] +[[deprecations_810_authorization]] +==== Authorization deprecations + +[[mark_apm_user_for_removal_in_future_major_release]] +.Mark `apm_user` for removal in a future major release +[%collapsible] +==== +*Details* + +The `apm_user` role has been deprecated and will be removed in a future major release. Users should migrate to `editor` and `viewer` roles + +*Impact* + +Users will have to migrate to `editor` and `viewer` roles +==== + diff --git a/docs/reference/release-notes/8.10.0.asciidoc b/docs/reference/release-notes/8.10.0.asciidoc index cc24df451ed94..ea0af6c485f25 100644 --- a/docs/reference/release-notes/8.10.0.asciidoc +++ b/docs/reference/release-notes/8.10.0.asciidoc @@ -1,8 +1,255 @@ [[release-notes-8.10.0]] == {es} version 8.10.0 -coming[8.10.0] - Also see <>. 
+[[breaking-8.10.0]] +[float] +=== Breaking changes + +Analysis:: +* Change pre-configured and cached analyzer components to use IndexVersion instead of Version {es-pull}97319[#97319] + +Geo:: +* Remove the unused executor builder for vector tile plugin {es-pull}96577[#96577] + +[[bug-8.10.0]] +[float] +=== Bug fixes + +Aggregations:: +* Cardinality nested in time series doc values bug {es-pull}99007[#99007] +* Skip segment for `MatchNoDocsQuery` filters {es-pull}98295[#98295] (issue: {es-issue}94637[#94637]) + +Allocation:: +* Do not assign ignored shards {es-pull}98265[#98265] +* Remove exception wrapping in `BatchedRerouteService` {es-pull}97224[#97224] + +Application:: +* [Profiling] Abort index creation on outdated index {es-pull}98864[#98864] +* [Profiling] Consider static settings in status {es-pull}97890[#97890] +* [Profiling] Mark executables without a name {es-pull}98884[#98884] + +CRUD:: +* Add missing sync on `indicesThatCannotBeCreated` {es-pull}97869[#97869] + +Cluster Coordination:: +* Fix cluster bootstrap warning for single-node discovery {es-pull}96895[#96895] (issue: {es-issue}96874[#96874]) +* Fix election scheduling after discovery outage {es-pull}98420[#98420] +* Improve reliability of elections with message delays {es-pull}98354[#98354] (issue: {es-issue}97909[#97909]) +* Make `TransportAddVotingConfigExclusionsAction` retryable {es-pull}98386[#98386] +* Release master service task on timeout {es-pull}97711[#97711] + +Data streams:: +* Avoid lifecycle NPE in the data stream lifecycle usage API {es-pull}98260[#98260] + +Distributed:: +* Avoid `transport_worker` thread in `TransportBroadcastAction` {es-pull}98001[#98001] +* Avoid `transport_worker` thread in `TransportBroadcastByNodeAction` {es-pull}97920[#97920] (issue: {es-issue}97914[#97914]) +* Fork response reading in `TransportNodesAction` {es-pull}97899[#97899] + +Downsampling:: +* Copy "index.lifecycle.name" for ILM managed indices {es-pull}97110[#97110] (issue: {es-issue}96732[#96732]) 
+* Downsampling: copy the `_tier_preference` setting {es-pull}96982[#96982] (issue: {es-issue}96733[#96733]) + +EQL:: +* Fix async missing events {es-pull}97718[#97718] (issue: {es-issue}97644[#97644]) + +Geo:: +* Fix how Maps#flatten handle map values inside a list {es-pull}98828[#98828] +* Fix mvt error when returning partial results {es-pull}98765[#98765] (issue: {es-issue}98730[#98730]) + +Health:: +* `_health_report` SLM indicator should use the policy ID (not the name) {es-pull}99111[#99111] + +Indices APIs:: +* Ensure frozen indices have correct tier preference {es-pull}97967[#97967] + +Infra/REST API:: +* Fix possible NPE when transportversion is null in `MainResponse` {es-pull}97203[#97203] + +Ingest Node:: +* Revert "Add mappings for enrich fields" {es-pull}98683[#98683] + +Machine Learning:: +* Avoid risk of OOM in datafeeds when memory is constrained {es-pull}98324[#98324] (issue: {es-issue}89769[#89769]) +* Detect infinite loop in the WordPiece tokenizer {es-pull}98206[#98206] +* Fix to stop aggregatable subobjects from being considered multi-fields, to support `"subobjects": false` in data frame analytics {es-pull}97705[#97705] (issue: {es-issue}88605[#88605]) +* Fix weird `change_point` bug where all data values are equivalent {es-pull}97588[#97588] +* The model loading service should not notify listeners in a sync block {es-pull}97142[#97142] + +Mapping:: +* Fix `fields` API with `subobjects: false` {es-pull}97092[#97092] (issue: {es-issue}96700[#96700]) + +Network:: +* Fork remote-cluster response handling {es-pull}97922[#97922] + +Search:: +* Fork CCS remote-cluster responses {es-pull}98124[#98124] (issue: {es-issue}97997[#97997]) +* Fork CCS search-shards handling {es-pull}98209[#98209] +* Improve test coverage for CCS search cancellation and fix response bugs {es-pull}97029[#97029] +* Make `terminate_after` early termination friendly {es-pull}97540[#97540] (issue: {es-issue}97269[#97269]) +* Track `max_score` in collapse when requested 
{es-pull}97703[#97703] (issue: {es-issue}97653[#97653]) + +Security:: +* Fix NPE when `GetUser` with profile uid before profile index exists {es-pull}98961[#98961] + +Snapshot/Restore:: +* Fix `BlobCacheBufferedIndexInput` large read after clone {es-pull}98970[#98970] + +TSDB:: +* Mapped field types searchable with doc values {es-pull}97724[#97724] + +Transform:: +* Fix transform incorrectly calculating date bucket on updating old data {es-pull}97401[#97401] (issue: {es-issue}97101[#97101]) + +Watcher:: +* Changing watcher to disable cookies in shared http client {es-pull}97591[#97591] + +[[deprecation-8.10.0]] +[float] +=== Deprecations + +Authorization:: +* Mark `apm_user` for removal in a future major release {es-pull}87674[#87674] + +[[enhancement-8.10.0]] +[float] +=== Enhancements + +Aggregations:: +* Improve error message when aggregation doesn't support counter field {es-pull}93545[#93545] +* Set default index mode for `TimeSeries` to `null` {es-pull}98808[#98808] (issue: {es-issue}97429[#97429]) + +Allocation:: +* Add `node.roles` to cat allocation API {es-pull}96994[#96994] + +Application:: +* [Profiling] Add initial support for upgrades {es-pull}97380[#97380] +* [Profiling] Support index migrations {es-pull}97773[#97773] + +Authentication:: +* Avoid double get {es-pull}98067[#98067] (issue: {es-issue}97928[#97928]) +* Give all acces to .slo-observability.* indice to kibana user {es-pull}97539[#97539] +* Refresh tokens without search {es-pull}97395[#97395] + +Authorization:: +* Add "operator" field to authenticate response {es-pull}97234[#97234] +* Read operator privs enabled from Env settings {es-pull}98246[#98246] +* [Fleet] Allow `kibana_system` to put datastream lifecycle {es-pull}97732[#97732] + +Data streams:: +* Install data stream template for Kibana reporting {es-pull}97765[#97765] + +Downsampling:: +* Change `MetricFieldProducer#metrics` field type from list to array {es-pull}97344[#97344] +* Improve iterating over many field producers during 
downsample operation {es-pull}97281[#97281] +* Run downsampling using persistent tasks {es-pull}97557[#97557] (issue: {es-issue}93582[#93582]) + +EQL:: +* EQL to use only the necessary fields in the internal `field_caps` calls {es-pull}98987[#98987] + +Engine:: +* Fix edge case for active flag for flush on idle {es-pull}97332[#97332] (issue: {es-issue}97154[#97154]) + +Health:: +* Adding special logic to the disk health check for search-only nodes {es-pull}98508[#98508] +* Health API Periodic Logging {es-pull}96772[#96772] + +ILM+SLM:: +* Separating SLM from ILM {es-pull}98184[#98184] + +Infra/Core:: +* Infrastructure to report upon document parsing {es-pull}97961[#97961] + +Infra/Node Lifecycle:: +* Check ILM status before reporting node migration STALLED {es-pull}98367[#98367] (issue: {es-issue}89486[#89486]) + +Infra/Plugins:: +* Adding `ApiFilteringActionFilter` {es-pull}97985[#97985] + +Infra/REST API:: +* Enable Serverless API protections dynamically {es-pull}97079[#97079] +* Make `RestController` pluggable {es-pull}98187[#98187] + +Infra/Settings:: +* Mark customer settings for serverless {es-pull}98051[#98051] + +Ingest Node:: +* Allow custom geo ip database files to be downloaded {es-pull}97850[#97850] + +Network:: +* Add request header size limit for RCS transport connections {es-pull}98692[#98692] + +Search:: +* Add `completion_time` time field to `async_search` get and status response {es-pull}97700[#97700] (issue: {es-issue}88640[#88640]) +* Add setting for search parallelism {es-pull}98455[#98455] +* Add support for concurrent collection when size is greater than zero {es-pull}98425[#98425] +* Cross-cluster search provides details about search on each cluster {es-pull}97731[#97731] +* Enable parallel collection in Dfs phase {es-pull}97416[#97416] +* Exclude clusters from a cross-cluster search {es-pull}97865[#97865] +* Improve MatchNoDocsQuery description {es-pull}96069[#96069] (issue: {es-issue}95741[#95741]) +* Improve exists query rewrite 
{es-pull}97159[#97159] +* Improve match query rewrite {es-pull}97208[#97208] +* Improve prefix query rewrite {es-pull}97209[#97209] +* Improve wildcard query and terms query rewrite {es-pull}97594[#97594] +* Introduce Synonyms Management API used for synonym and synonym_graph filters {es-pull}97962[#97962] (issue: {es-issue}38523[#38523]) +* Introduce a collector manager for `PartialHitCountCollector` {es-pull}97550[#97550] +* Introduce a collector manager for `QueryPhaseCollector` {es-pull}97410[#97410] +* Limit `_terms_enum` prefix size {es-pull}97488[#97488] (issue: {es-issue}96572[#96572]) +* Support minimum_should_match field for terms_set query {es-pull}96082[#96082] +* Support type for simple query string {es-pull}96717[#96717] +* Unwrap IOException in `ContextIndexSearcher` concurrent code-path {es-pull}98459[#98459] +* Use a collector manager in DfsPhase Knn Search {es-pull}96689[#96689] +* Use the Weight#matches mode for highlighting by default {es-pull}96068[#96068] +* Wire `QueryPhaseCollectorManager` into the query phase {es-pull}97726[#97726] +* Wire concurrent top docs collector managers when size is 0 {es-pull}97755[#97755] +* `ProfileCollectorManager` to support child profile collectors {es-pull}97387[#97387] +* cleanup some code NoriTokenizerFactory and KuromojiTokenizerFactory {es-pull}92574[#92574] + +Security:: +* Add an API for managing the settings of Security system indices {es-pull}97630[#97630] +* Support getting active-only API keys via Get API keys API {es-pull}98259[#98259] (issue: {es-issue}97995[#97995]) + +Snapshot/Restore:: +* Add Setting to optionally use mmap for shared cache IO {es-pull}97581[#97581] +* Collect additional object store stats for S3 {es-pull}98083[#98083] +* HDFS plugin add replication_factor param {es-pull}94132[#94132] + +Store:: +* Allow Lucene directory implementations to estimate their size {es-pull}97822[#97822] +* Allow `ByteSizeDirectory` to expose their data set sizes {es-pull}98085[#98085] + +TSDB:: +* 
Add tsdb metrics builtin component template {es-pull}97602[#97602] +* Include more downsampling status statistics {es-pull}96930[#96930] (issue: {es-issue}96760[#96760]) +* `TimeSeriesIndexSearcher` to offload to the provided executor {es-pull}98414[#98414] + +Transform:: +* Support boxplot aggregation in transform {es-pull}96515[#96515] + +[[feature-8.10.0]] +[float] +=== New features + +Application:: +* Enable Query Rules as technical preview {es-pull}97466[#97466] +* [Enterprise Search] Add connectors indices and ent-search pipeline {es-pull}97463[#97463] + +Data streams:: +* Introduce downsampling configuration for data stream lifecycle {es-pull}97041[#97041] + +Search:: +* Introduce executor for concurrent search {es-pull}98204[#98204] + +Security:: +* Beta release for API key based cross-cluster access {es-pull}98307[#98307] + +[[upgrade-8.10.0]] +[float] +=== Upgrades + +Network:: +* Upgrade Netty to 4.1.94.Final {es-pull}97040[#97040] + From fe7fe7dc047d994cef8e50aedb2dad0b05ed0ea4 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Thu, 14 Sep 2023 16:30:44 +0200 Subject: [PATCH 062/114] Increase limit on stdout, stderr in docker tests (#99578) json logs used in docker tests are consuming more space than the previous 100KB setting the limit to 1MB relates #99508 --- qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java index fc69d0de71a26..d06110deb9ef0 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java @@ -202,7 +202,7 @@ private Result runScriptIgnoreExitCode(String[] command) { private String readFileIfExists(Path path) throws IOException { if (Files.exists(path)) { long size = Files.size(path); - final int maxFileSize = 100 * 1024; + final int 
maxFileSize = 1024 * 1024; if (size > maxFileSize) { // file is really big, truncate try (var br = Files.newBufferedReader(path, StandardCharsets.UTF_8)) { From 7ad521f6ff80e651a69e5ca49eea9950bc4a64bf Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 14 Sep 2023 15:35:47 +0100 Subject: [PATCH 063/114] Fix deadlock between Cache.put and invalidateAll (#99480) The invalidateAll method is taking out the lru lock and segment locks in a different order to the put method, when the put is replacing an existing value. This results in a deadlock between the two methods as they try to swap locks. This fixes it by making sure invalidateAll takes out locks in the same order as put. This is difficult to test because the put needs to be replacing an existing value, and invalidateAll clears the cache, resulting in subsequent puts not hitting the deadlock condition. A test that overrides some internal implementations to expose this particular deadlock will be coming later. --- docs/changelog/99480.yaml | 6 +++++ .../org/elasticsearch/common/cache/Cache.java | 22 +++++++++---------- 2 files changed, 17 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/99480.yaml diff --git a/docs/changelog/99480.yaml b/docs/changelog/99480.yaml new file mode 100644 index 0000000000000..08dcdceca60b0 --- /dev/null +++ b/docs/changelog/99480.yaml @@ -0,0 +1,6 @@ +pr: 99480 +summary: Fix deadlock between Cache.put and Cache.invalidateAll +area: Infra/Core +type: bug +issues: + - 99326 diff --git a/server/src/main/java/org/elasticsearch/common/cache/Cache.java b/server/src/main/java/org/elasticsearch/common/cache/Cache.java index bb4bd0062b8db..7cd6fa471040a 100644 --- a/server/src/main/java/org/elasticsearch/common/cache/Cache.java +++ b/server/src/main/java/org/elasticsearch/common/cache/Cache.java @@ -521,12 +521,12 @@ public void invalidateAll() { Entry h; boolean[] haveSegmentLock = new boolean[NUMBER_OF_SEGMENTS]; - try { - for (int i = 0; i < NUMBER_OF_SEGMENTS; i++) { - 
segments[i].segmentLock.writeLock().lock(); - haveSegmentLock[i] = true; - } - try (ReleasableLock ignored = lruLock.acquire()) { + try (ReleasableLock ignored = lruLock.acquire()) { + try { + for (int i = 0; i < NUMBER_OF_SEGMENTS; i++) { + segments[i].segmentLock.writeLock().lock(); + haveSegmentLock[i] = true; + } h = head; for (CacheSegment segment : segments) { segment.map = null; @@ -539,11 +539,11 @@ public void invalidateAll() { head = tail = null; count = 0; weight = 0; - } - } finally { - for (int i = NUMBER_OF_SEGMENTS - 1; i >= 0; i--) { - if (haveSegmentLock[i]) { - segments[i].segmentLock.writeLock().unlock(); + } finally { + for (int i = NUMBER_OF_SEGMENTS - 1; i >= 0; i--) { + if (haveSegmentLock[i]) { + segments[i].segmentLock.writeLock().unlock(); + } } } } From 840d49ac9be1dfe5d78cee19165d9fb91ec3ed43 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Thu, 14 Sep 2023 17:38:43 +0200 Subject: [PATCH 064/114] Mute testCancelRead() (#99585) Related https://github.com/elastic/elasticsearch/issues/99582 --- .../org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index cae86e44c69d0..92b81057ec698 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -168,6 +168,7 @@ public void testTaskContents() throws Exception { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99582") public void testCancelRead() throws Exception { ActionFuture response = startEsql(); List infos = getTasksStarting(); From 07f6a65b24c285a892ff64d6e75dd302135fa683 Mon Sep 17 00:00:00 2001 From: Chris Hegarty 
<62058229+ChrisHegarty@users.noreply.github.com> Date: Thu, 14 Sep 2023 17:24:42 +0100 Subject: [PATCH 065/114] ESQL: Remove default driver context (#99573) This commit removes the default driver implementation. --- .../compute/operator/AggregatorBenchmark.java | 2 +- .../compute/operator/EvalBenchmark.java | 19 ++++-- .../compute/operator/DriverContext.java | 8 --- .../compute/operator/EvalOperator.java | 2 +- .../compute/operator/FilterOperator.java | 2 +- .../operator/ThrowingDriverContext.java | 64 +++++++++++++++++++ .../elasticsearch/compute/OperatorTests.java | 23 +++++-- .../AggregatorFunctionTestCase.java | 12 ++-- ...untDistinctIntAggregatorFunctionTests.java | 2 +- ...ntDistinctLongAggregatorFunctionTests.java | 2 +- .../GroupingAggregatorFunctionTestCase.java | 28 ++++---- .../SumDoubleAggregatorFunctionTests.java | 10 +-- .../SumIntAggregatorFunctionTests.java | 2 +- .../SumLongAggregatorFunctionTests.java | 4 +- .../lucene/LuceneSourceOperatorTests.java | 2 +- .../lucene/LuceneTopNSourceOperatorTests.java | 2 +- .../ValuesSourceReaderOperatorTests.java | 6 +- .../compute/operator/AnyOperatorTestCase.java | 12 +++- .../compute/operator/AsyncOperatorTests.java | 14 +++- .../compute/operator/DriverContextTests.java | 37 ++++++----- .../operator/ForkingOperatorTestCase.java | 12 ++-- .../compute/operator/LimitOperatorTests.java | 4 +- .../compute/operator/OperatorTestCase.java | 8 +-- .../compute/operator/RowOperatorTests.java | 7 +- .../exchange/ExchangeServiceTests.java | 16 ++++- .../operator/topn/TopNOperatorTests.java | 18 +++--- .../xpack/esql/lookup/EnrichLookupIT.java | 11 +++- .../esql/enrich/EnrichLookupService.java | 12 +++- .../evaluator/mapper/EvaluatorMapper.java | 4 +- .../esql/plugin/TransportEsqlQueryAction.java | 2 +- .../function/AbstractFunctionTestCase.java | 20 ++++-- .../scalar/conditional/CaseTests.java | 7 +- .../function/scalar/math/RoundTests.java | 7 +- .../AbstractMultivalueFunctionTestCase.java | 3 +- 
.../scalar/multivalue/MvConcatTests.java | 2 +- .../function/scalar/nulls/CoalesceTests.java | 3 +- .../function/scalar/string/ConcatTests.java | 5 +- .../function/scalar/string/LeftTests.java | 3 +- .../function/scalar/string/RightTests.java | 3 +- .../function/scalar/string/SplitTests.java | 3 +- .../scalar/string/SubstringTests.java | 5 +- .../AbstractBinaryOperatorTestCase.java | 3 +- .../operator/arithmetic/NegTests.java | 3 +- .../xpack/esql/planner/EvalMapperTests.java | 13 +++- 44 files changed, 283 insertions(+), 144 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java index 8cad100de27f4..75fe76ea8fbb0 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ -142,7 +142,7 @@ private static Operator operator(String grouping, String op, String dataType) { return new HashAggregationOperator( List.of(supplier(op, dataType, groups.size()).groupingAggregatorFactory(AggregatorMode.SINGLE)), () -> BlockHash.build(groups, BIG_ARRAYS, 16 * 1024, false), - new DriverContext() + new DriverContext(BigArrays.NON_RECYCLING_INSTANCE) ); } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index ce839a4c8eace..909bf16815a0d 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -8,6 +8,7 @@ package org.elasticsearch.benchmark.compute.operator; +import 
org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -80,14 +81,14 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) { return switch (operation) { case "abs" -> { FieldAttribute longField = longField(); - yield EvalMapper.toEvaluator(new Abs(Source.EMPTY, longField), layout(longField)).get(new DriverContext()); + yield EvalMapper.toEvaluator(new Abs(Source.EMPTY, longField), layout(longField)).get(driverContext()); } case "add" -> { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator( new Add(Source.EMPTY, longField, new Literal(Source.EMPTY, 1L, DataTypes.LONG)), layout(longField) - ).get(new DriverContext()); + ).get(driverContext()); } case "date_trunc" -> { FieldAttribute timestamp = new FieldAttribute( @@ -98,28 +99,28 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) { yield EvalMapper.toEvaluator( new DateTrunc(Source.EMPTY, new Literal(Source.EMPTY, Duration.ofHours(24), EsqlDataTypes.TIME_DURATION), timestamp), layout(timestamp) - ).get(new DriverContext()); + ).get(driverContext()); } case "equal_to_const" -> { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator( new Equals(Source.EMPTY, longField, new Literal(Source.EMPTY, 100_000L, DataTypes.LONG)), layout(longField) - ).get(new DriverContext()); + ).get(driverContext()); } case "long_equal_to_long" -> { FieldAttribute lhs = longField(); FieldAttribute rhs = longField(); - yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(new DriverContext()); + yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext()); } case "long_equal_to_int" -> { FieldAttribute lhs = longField(); FieldAttribute rhs = intField(); - yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(new 
DriverContext()); + yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(driverContext()); } case "mv_min", "mv_min_ascending" -> { FieldAttribute longField = longField(); - yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get(new DriverContext()); + yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get(driverContext()); } default -> throw new UnsupportedOperationException(); }; @@ -259,4 +260,8 @@ private static void run(String operation) { checkExpected(operation, output); } } + + static DriverContext driverContext() { + return new DriverContext(BigArrays.NON_RECYCLING_INSTANCE); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java index 4e95e582769b5..db60b45f4516c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java @@ -35,9 +35,6 @@ */ public class DriverContext { - /** A default driver context. The returned bigArrays is non recycling. */ - public static DriverContext DEFAULT = new DriverContext(BigArrays.NON_RECYCLING_INSTANCE); - // Working set. Only the thread executing the driver will update this set. 
Set workingSet = Collections.newSetFromMap(new IdentityHashMap<>()); @@ -45,11 +42,6 @@ public class DriverContext { private final BigArrays bigArrays; - // For testing - public DriverContext() { - this(BigArrays.NON_RECYCLING_INSTANCE); - } - public DriverContext(BigArrays bigArrays) { Objects.requireNonNull(bigArrays); this.bigArrays = bigArrays; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java index 7202f05b5562a..221c65f8a4ce3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java @@ -25,7 +25,7 @@ public Operator get(DriverContext driverContext) { @Override public String describe() { - return "EvalOperator[evaluator=" + evaluator.get(DriverContext.DEFAULT) + "]"; + return "EvalOperator[evaluator=" + evaluator.get(new ThrowingDriverContext()) + "]"; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java index 20864373e8016..d3e7d6aa3a658 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FilterOperator.java @@ -27,7 +27,7 @@ public Operator get(DriverContext driverContext) { @Override public String describe() { - return "FilterOperator[evaluator=" + evaluatorSupplier.get(DriverContext.DEFAULT) + "]"; + return "FilterOperator[evaluator=" + evaluatorSupplier.get(new ThrowingDriverContext()) + "]"; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java new file mode 100644 index 0000000000000..d985d7649ee38 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.util.FloatArray; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasable; + +public class ThrowingDriverContext extends DriverContext { + public ThrowingDriverContext() { + super(new ThrowingBigArrays()); + } + + @Override + public BigArrays bigArrays() { + throw new AssertionError("should not reach here"); + } + + @Override + public boolean addReleasable(Releasable releasable) { + throw new AssertionError("should not reach here"); + } + + static class ThrowingBigArrays extends BigArrays { + + ThrowingBigArrays() { + super(null, null, "fake"); + } + + @Override + public ByteArray newByteArray(long size, boolean clearOnResize) { + throw new AssertionError("should not reach here"); + } + + @Override + public IntArray newIntArray(long size, boolean clearOnResize) { + throw new AssertionError("should not reach here"); + } + + @Override + public LongArray newLongArray(long size, boolean clearOnResize) { + throw new AssertionError("should not reach here"); + } + + @Override + public FloatArray newFloatArray(long size, boolean clearOnResize) { + throw new AssertionError("should not reach here"); + } + + @Override + 
public DoubleArray newDoubleArray(long size, boolean clearOnResize) { + throw new AssertionError("should not reach here"); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 135877e4f5405..d7ec9bcaf99ee 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.BaseDirectoryWrapper; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; @@ -112,7 +113,7 @@ public void testQueryOperator() throws IOException { assertTrue("duplicated docId=" + docId, actualDocIds.add(docId)); } }); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); drivers.add(new Driver(driverContext, factory.get(driverContext), List.of(), docCollector, () -> {})); } OperatorTestCase.runDriver(drivers); @@ -144,9 +145,10 @@ public void testQueryOperator() throws IOException { } } + // @Repeat(iterations = 1) public void testGroupingWithOrdinals() throws Exception { final String gField = "g"; - final int numDocs = between(100, 10000); + final int numDocs = 2856; // between(100, 10000); final Map expectedCounts = new HashMap<>(); int keyLength = randomIntBetween(1, 10); try (BaseDirectoryWrapper dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) { @@ -210,7 +212,7 @@ public String toString() { }; try (DirectoryReader reader = writer.getReader()) { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext 
= driverContext(); Driver driver = new Driver( driverContext, @@ -258,14 +260,18 @@ public String toString() { LongBlock counts = page.getBlock(1); for (int i = 0; i < keys.getPositionCount(); i++) { BytesRef spare = new BytesRef(); - actualCounts.put(keys.getBytesRef(i, spare), counts.getLong(i)); + keys.getBytesRef(i, spare); + actualCounts.put(BytesRef.deepCopyOf(spare), counts.getLong(i)); } + // System.out.println("HEGO: keys.getPositionCount=" + keys.getPositionCount()); + // Releasables.close(keys); }), () -> {} ); OperatorTestCase.runDriver(driver); assertThat(actualCounts, equalTo(expectedCounts)); assertDriverContext(driverContext); + org.elasticsearch.common.util.MockBigArrays.ensureAllArraysAreReleased(); } } } @@ -276,7 +282,7 @@ public void testLimitOperator() { var values = randomList(positions, positions, ESTestCase::randomLong); var results = new ArrayList(); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); try ( var driver = new Driver( driverContext, @@ -388,6 +394,13 @@ private BigArrays bigArrays() { return new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); } + /** + * A {@link DriverContext} that won't throw {@link CircuitBreakingException}. 
+ */ + protected final DriverContext driverContext() { + return new DriverContext(bigArrays()); + } + public static void assertDriverContext(DriverContext driverContext) { assertTrue(driverContext.isFinished()); assertThat(driverContext.getSnapshot().releasables(), empty()); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index e2f1c606a4c25..a4b6c8b965962 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -92,7 +92,7 @@ public final void testIgnoresNulls() { int end = between(1_000, 100_000); List results = new ArrayList<>(); List input = CannedSourceOperator.collectPages(simpleInput(end)); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); try ( Driver d = new Driver( @@ -110,14 +110,14 @@ public final void testIgnoresNulls() { public final void testMultivalued() { int end = between(1_000, 100_000); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(new PositionMergingSourceOperator(simpleInput(end))); assertSimpleOutput(input, drive(simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext), input.iterator())); } public final void testMultivaluedWithNulls() { int end = between(1_000, 100_000); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages( new NullInsertingSourceOperator(new PositionMergingSourceOperator(simpleInput(end))) ); @@ -125,7 +125,7 @@ public final void testMultivaluedWithNulls() { } public final void testEmptyInput() { - 
DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), List.of().iterator()); assertThat(results, hasSize(1)); @@ -133,7 +133,7 @@ public final void testEmptyInput() { } public final void testEmptyInputInitialFinal() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List results = drive( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), @@ -147,7 +147,7 @@ public final void testEmptyInputInitialFinal() { } public final void testEmptyInputInitialIntermediateFinal() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List results = drive( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index e559dc4effccb..974046469e518 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -62,7 +62,7 @@ protected void assertOutputFromEmpty(Block b) { } public void testRejectsDouble() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); try ( Driver d = new Driver( driverContext, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 57b90fb844f54..04cbe0ed53236 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -63,7 +63,7 @@ protected void assertOutputFromEmpty(Block b) { } public void testRejectsDouble() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); try ( Driver d = new Driver( driverContext, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index 002790b3735d2..23015d066810a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -145,7 +145,7 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { } public final void testNullGroupsAndValues() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(new NullInsertingSourceOperator(simpleInput(end))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); @@ -153,7 +153,7 @@ public final void testNullGroupsAndValues() { } public final void testNullGroups() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(nullGroups(simpleInput(end))); 
List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); @@ -182,7 +182,7 @@ protected void appendNull(ElementType elementType, Block.Builder builder, int bl } public final void testNullValues() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(nullValues(simpleInput(end))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); @@ -190,7 +190,7 @@ public final void testNullValues() { } public final void testNullValuesInitialIntermediateFinal() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(nullValues(simpleInput(end))); List results = drive( @@ -218,7 +218,7 @@ protected void appendNull(ElementType elementType, Block.Builder builder, int bl } public final void testMultivalued() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); int end = between(1_000, 100_000); List input = CannedSourceOperator.collectPages(mergeValues(simpleInput(end))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); @@ -226,7 +226,7 @@ public final void testMultivalued() { } public final void testMulitvaluedNullGroupsAndValues() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(new NullInsertingSourceOperator(mergeValues(simpleInput(end)))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); @@ -234,7 +234,7 @@ public final void testMulitvaluedNullGroupsAndValues() { } public final void testMulitvaluedNullGroup() { - 
DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(nullGroups(mergeValues(simpleInput(end)))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); @@ -242,7 +242,7 @@ public final void testMulitvaluedNullGroup() { } public final void testMulitvaluedNullValues() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); int end = between(50, 60); List input = CannedSourceOperator.collectPages(nullValues(mergeValues(simpleInput(end)))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); @@ -250,12 +250,12 @@ public final void testMulitvaluedNullValues() { } public final void testNullOnly() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); assertNullOnly(List.of(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext))); } public final void testNullOnlyInputInitialFinal() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); assertNullOnly( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), @@ -265,7 +265,7 @@ public final void testNullOnlyInputInitialFinal() { } public final void testNullOnlyInputInitialIntermediateFinal() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); assertNullOnly( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), @@ -294,12 +294,12 @@ private void assertNullOnly(List operators) { } public final void testNullSome() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); 
assertNullSome(List.of(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext))); } public final void testNullSomeInitialFinal() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); assertNullSome( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), @@ -309,7 +309,7 @@ public final void testNullSomeInitialFinal() { } public final void testNullSomeInitialIntermediateFinal() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); assertNullSome( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index 909b582bec732..767f9a2d5c25b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -49,7 +49,7 @@ protected void assertSimpleOutput(List input, Block result) { } public void testOverflowSucceeds() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List results = new ArrayList<>(); try ( Driver d = new Driver( @@ -67,7 +67,7 @@ public void testOverflowSucceeds() { } public void testSummationAccuracy() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List results = new ArrayList<>(); try ( Driver d = new Driver( @@ -96,7 +96,7 @@ public void testSummationAccuracy() { : randomDoubleBetween(Double.MIN_VALUE, Double.MAX_VALUE, true); sum += values[i]; } - driverContext = new DriverContext(); 
+ driverContext = driverContext(); try ( Driver d = new Driver( driverContext, @@ -118,7 +118,7 @@ public void testSummationAccuracy() { for (int i = 0; i < n; i++) { largeValues[i] = Double.MAX_VALUE; } - driverContext = new DriverContext(); + driverContext = driverContext(); try ( Driver d = new Driver( driverContext, @@ -137,7 +137,7 @@ public void testSummationAccuracy() { for (int i = 0; i < n; i++) { largeValues[i] = -Double.MAX_VALUE; } - driverContext = new DriverContext(); + driverContext = driverContext(); try ( Driver d = new Driver( driverContext, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index d9e073ace9b6e..552b0d2d8836f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -49,7 +49,7 @@ protected void assertSimpleOutput(List input, Block result) { } public void testRejectsDouble() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); try ( Driver d = new Driver( driverContext, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index 25e3d62ae9ed8..21880eb6b1a3e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -49,7 +49,7 @@ public void assertSimpleOutput(List input, Block result) { } public void testOverflowFails() { - 
DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); try ( Driver d = new Driver( driverContext, @@ -65,7 +65,7 @@ public void testOverflowFails() { } public void testRejectsDouble() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); try ( Driver d = new Driver( driverContext, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index f3eef4ea45f90..bbafc8ed753cc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -143,7 +143,7 @@ public void testEmpty() { } private void testSimple(int size, int limit) { - DriverContext ctx = new DriverContext(); + DriverContext ctx = driverContext(); LuceneSourceOperator.Factory factory = simple(nonBreakingBigArrays(), DataPartitioning.SHARD, size, limit); Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory( reader, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index 7abf042fa851f..54853abd0cecb 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -148,7 +148,7 @@ public void testEmpty() { } private void testSimple(int size, int limit) { - DriverContext ctx = new DriverContext(); + DriverContext ctx = driverContext(); LuceneTopNSourceOperator.Factory factory = simple(nonBreakingBigArrays(), 
DataPartitioning.SHARD, size, limit); Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory( reader, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index c2c8c9e05c064..64edcaa43d89b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -160,7 +160,7 @@ protected SourceOperator simpleInput(int size) { randomPageSize(), LuceneOperator.NO_LIMIT ); - return luceneFactory.get(new DriverContext()); + return luceneFactory.get(driverContext()); } @Override @@ -226,7 +226,7 @@ public void testLoadAllInOnePageShuffled() { } private void loadSimpleAndAssert(List input) { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List results = new ArrayList<>(); List operators = List.of( factory( @@ -390,7 +390,7 @@ public void testValuesSourceReaderOperatorWithNulls() throws IOException { reader = w.getReader(); } - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); var luceneFactory = new LuceneSourceOperator.Factory( List.of(mockSearchContext(reader)), ctx -> new MatchAllDocsQuery(), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java index e70160041047e..5edaa8d8da340 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java @@ -60,8 +60,7 @@ public final void 
testSimpleDescription() { Operator.OperatorFactory factory = simple(nonBreakingBigArrays()); String description = factory.describe(); assertThat(description, equalTo(expectedDescriptionOfSimple())); - DriverContext driverContext = new DriverContext(); - try (Operator op = factory.get(driverContext)) { + try (Operator op = factory.get(driverContext())) { if (op instanceof GroupingAggregatorFunction) { assertThat(description, matchesPattern(GROUPING_AGG_FUNCTION_DESCRIBE_PATTERN)); } else { @@ -74,7 +73,7 @@ public final void testSimpleDescription() { * Makes sure the description of {@link #simple} matches the {@link #expectedDescriptionOfSimple}. */ public final void testSimpleToString() { - try (Operator operator = simple(nonBreakingBigArrays()).get(new DriverContext())) { + try (Operator operator = simple(nonBreakingBigArrays()).get(driverContext())) { assertThat(operator.toString(), equalTo(expectedToStringOfSimple())); } } @@ -85,4 +84,11 @@ public final void testSimpleToString() { protected final BigArrays nonBreakingBigArrays() { return new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(); } + + /** + * A {@link DriverContext} with a nonBreakingBigArrays. 
+ */ + protected final DriverContext driverContext() { + return new DriverContext(nonBreakingBigArrays()); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index 48911c208bdc9..2c566aa46c413 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -13,6 +13,8 @@ import org.elasticsearch.action.support.ListenableActionFuture; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; @@ -20,6 +22,7 @@ import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; @@ -116,7 +119,7 @@ public void close() { }); PlainActionFuture future = new PlainActionFuture<>(); Driver driver = new Driver( - new DriverContext(), + driverContext(), sourceOperator, List.of(asyncOperator), outputOperator, @@ -205,4 +208,13 @@ protected void doRun() { threadPool.schedule(command, delay, threadPool.executor(ESQL_TEST_EXECUTOR)); } } + + /** + * A {@link DriverContext} with a nonBreakingBigArrays. 
+ */ + DriverContext driverContext() { + return new DriverContext( + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + ); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java index 8e322d6a80b99..dcf56c09efe05 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java @@ -41,12 +41,10 @@ public class DriverContextTests extends ESTestCase { - final BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()); - private static final String ESQL_TEST_EXECUTOR = "esql_test_executor"; public void testEmptyFinished() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = new AssertingDriverContext(); driverContext.finish(); assertTrue(driverContext.isFinished()); var snapshot = driverContext.getSnapshot(); @@ -54,7 +52,7 @@ public void testEmptyFinished() { } public void testAddByIdentity() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = new AssertingDriverContext(); ReleasablePoint point1 = new ReleasablePoint(1, 2); ReleasablePoint point2 = new ReleasablePoint(1, 2); assertThat(point1, equalTo(point2)); @@ -68,9 +66,11 @@ public void testAddByIdentity() { } public void testAddFinish() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = new AssertingDriverContext(); int count = randomInt(128); - Set releasables = IntStream.range(0, count).mapToObj(i -> randomReleasable()).collect(toIdentitySet()); + Set releasables = IntStream.range(0, count) + .mapToObj(i -> randomReleasable(driverContext.bigArrays())) + 
.collect(toIdentitySet()); assertThat(releasables, hasSize(count)); releasables.forEach(driverContext::addReleasable); @@ -84,7 +84,7 @@ public void testAddFinish() { } public void testRemoveAbsent() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = new AssertingDriverContext(); boolean removed = driverContext.removeReleasable(new NoOpReleasable()); assertThat(removed, equalTo(false)); driverContext.finish(); @@ -94,9 +94,11 @@ public void testRemoveAbsent() { } public void testAddRemoveFinish() { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = new AssertingDriverContext(); int count = randomInt(128); - Set releasables = IntStream.range(0, count).mapToObj(i -> randomReleasable()).collect(toIdentitySet()); + Set releasables = IntStream.range(0, count) + .mapToObj(i -> randomReleasable(driverContext.bigArrays())) + .collect(toIdentitySet()); assertThat(releasables, hasSize(count)); releasables.forEach(driverContext::addReleasable); @@ -112,9 +114,7 @@ public void testMultiThreaded() throws Exception { ExecutorService executor = threadPool.executor(ESQL_TEST_EXECUTOR); int tasks = randomIntBetween(4, 32); - List testDrivers = IntStream.range(0, tasks) - .mapToObj(i -> new TestDriver(new AssertingDriverContext(), randomInt(128), bigArrays)) - .toList(); + List testDrivers = IntStream.range(0, tasks).mapToObj(DriverContextTests::newTestDriver).toList(); List> futures = executor.invokeAll(testDrivers, 1, TimeUnit.MINUTES); assertThat(futures, hasSize(tasks)); for (var fut : futures) { @@ -135,9 +135,18 @@ public void testMultiThreaded() throws Exception { finishedReleasables.stream().flatMap(Set::stream).forEach(Releasable::close); } + static TestDriver newTestDriver(int unused) { + var driverContext = new AssertingDriverContext(); + return new TestDriver(driverContext, randomInt(128), driverContext.bigArrays()); + } + static class AssertingDriverContext extends DriverContext { volatile Thread 
thread; + AssertingDriverContext() { + super(new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService())); + } + @Override public boolean addReleasable(Releasable releasable) { checkThread(); @@ -219,10 +228,6 @@ static Set randomNFromCollection(Set input, int n) { return result; } - Releasable randomReleasable() { - return randomReleasable(bigArrays); - } - static Releasable randomReleasable(BigArrays bigArrays) { return switch (randomInt(3)) { case 0 -> new NoOpReleasable(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 5024c28d86a91..1c12fbf4bcd52 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -55,7 +55,7 @@ protected final Operator.OperatorFactory simple(BigArrays bigArrays) { public final void testInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); List results = new ArrayList<>(); @@ -79,7 +79,7 @@ public final void testInitialFinal() { public final void testManyInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); List results = new ArrayList<>(); @@ -100,7 +100,7 @@ public final void testManyInitialFinal() { public final void 
testInitialIntermediateFinal() { BigArrays bigArrays = nonBreakingBigArrays(); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); List results = new ArrayList<>(); @@ -126,7 +126,7 @@ public final void testInitialIntermediateFinal() { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99160") public final void testManyInitialManyPartialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); @@ -217,7 +217,7 @@ List createDriversForInput(BigArrays bigArrays, List input, List

drivers = new ArrayList<>(); for (List pages : splitInput) { - DriverContext driver1Context = new DriverContext(); + DriverContext driver1Context = driverContext(); drivers.add( new Driver( driver1Context, @@ -234,7 +234,7 @@ List createDriversForInput(BigArrays bigArrays, List input, List

oneDriverPerPageList(Iterator> source, Sup List in = source.next(); try ( Driver d = new Driver( - new DriverContext(), + driverContext(), new CannedSourceOperator(in.iterator()), operators.get(), new PageConsumerOperator(result::add), @@ -131,7 +131,7 @@ protected final List oneDriverPerPageList(Iterator> source, Sup private void assertSimple(BigArrays bigArrays, int size) { List input = CannedSourceOperator.collectPages(simpleInput(size)); - List results = drive(simple(bigArrays.withCircuitBreaking()).get(new DriverContext()), input.iterator()); + List results = drive(simple(bigArrays.withCircuitBreaking()).get(driverContext()), input.iterator()); assertSimpleOutput(input, results); } @@ -143,7 +143,7 @@ protected final List drive(List operators, Iterator input) List results = new ArrayList<>(); try ( Driver d = new Driver( - new DriverContext(), + driverContext(), new CannedSourceOperator(input), operators, new PageConsumerOperator(results::add), @@ -166,7 +166,7 @@ public static void runDriver(List drivers) { drivers.add( new Driver( "dummy-session", - new DriverContext(), + new DriverContext(BigArrays.NON_RECYCLING_INSTANCE), () -> "dummy-driver", new SequenceLongBlockSourceOperator(LongStream.range(0, between(1, 100)), between(1, 100)), List.of(), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java index ac7bc2f7e4ad1..bb2713e105b93 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java @@ -8,12 +8,15 @@ package org.elasticsearch.compute.operator; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import 
org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -22,7 +25,9 @@ import static org.hamcrest.Matchers.equalTo; public class RowOperatorTests extends ESTestCase { - final DriverContext driverContext = new DriverContext(); + final DriverContext driverContext = new DriverContext( + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + ); public void testBoolean() { RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(false)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index ec9fbac2c0368..af6c89395f245 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -16,6 +16,8 @@ import org.elasticsearch.cluster.node.VersionInformation; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.Block; @@ -28,6 +30,7 @@ import org.elasticsearch.compute.operator.SinkOperator; import org.elasticsearch.compute.operator.SourceOperator; import 
org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancellationService; import org.elasticsearch.test.ESTestCase; @@ -267,7 +270,7 @@ void runConcurrentTest( for (int i = 0; i < numSinks; i++) { String description = "sink-" + i; ExchangeSinkOperator sinkOperator = new ExchangeSinkOperator(exchangeSink.get(), Function.identity()); - DriverContext dc = new DriverContext(); + DriverContext dc = driverContext(); Driver d = new Driver( "test-session:1", dc, @@ -283,7 +286,7 @@ void runConcurrentTest( for (int i = 0; i < numSources; i++) { String description = "source-" + i; ExchangeSourceOperator sourceOperator = new ExchangeSourceOperator(exchangeSource.get()); - DriverContext dc = new DriverContext(); + DriverContext dc = driverContext(); Driver d = new Driver( "test-session:2", dc, @@ -469,4 +472,13 @@ public void sendResponse(Exception exception) throws IOException { in.sendResponse(exception); } } + + /** + * A {@link DriverContext} with a BigArrays that does not circuit break. 
+ */ + DriverContext driverContext() { + return new DriverContext( + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + ); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index fee7b4d336270..f7ead4912d1be 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -183,7 +183,7 @@ public void testRamBytesUsed() { List.of(DEFAULT_UNSORTABLE), List.of(new TopNOperator.SortOrder(0, true, false)), pageSize - ).get(new DriverContext()); + ).get(driverContext()); long actualEmpty = RamUsageTester.ramUsed(op) - RamUsageTester.ramUsed(LONG) - RamUsageTester.ramUsed(DEFAULT_UNSORTABLE); assertThat(op.ramBytesUsed(), both(greaterThan(actualEmpty - underCount)).and(lessThan(actualEmpty))); // But when we fill it then we're quite close @@ -452,7 +452,7 @@ public void testCollectAllValues() { } List> actualTop = new ArrayList<>(); - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( driverContext, @@ -536,7 +536,7 @@ public void testCollectAllValues_RandomMultiValues() { expectedTop.add(eTop); } - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List> actualTop = new ArrayList<>(); try ( Driver driver = new Driver( @@ -569,7 +569,7 @@ private List> topNTwoColumns( List encoder, List sortOrders ) { - DriverContext driverContext = new DriverContext(); + DriverContext driverContext = driverContext(); List> outputValues = new ArrayList<>(); try ( Driver driver = new Driver( @@ -611,7 +611,7 @@ public void 
testTopNManyDescriptionAndToString() { + sorts + "]]"; assertThat(factory.describe(), equalTo("TopNOperator[count=10" + tail)); - try (Operator operator = factory.get(new DriverContext())) { + try (Operator operator = factory.get(driverContext())) { assertThat(operator.toString(), equalTo("TopNOperator[count=0/10" + tail)); } } @@ -831,7 +831,7 @@ private void assertSortingOnMV( int topCount = randomIntBetween(1, values.size()); try ( Driver driver = new Driver( - new DriverContext(), + driverContext(), new CannedSourceOperator(List.of(page).iterator()), List.of(new TopNOperator(topCount, List.of(blockType), List.of(encoder), List.of(sortOrders), randomPageSize())), new PageConsumerOperator(p -> readInto(actualValues, p)), @@ -965,7 +965,7 @@ public void testIPSortingSingleValue() throws UnknownHostException { List> actual = new ArrayList<>(); try ( Driver driver = new Driver( - new DriverContext(), + driverContext(), new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( new TopNOperator( @@ -1088,7 +1088,7 @@ private void assertIPSortingOnMultiValues( List> actual = new ArrayList<>(); try ( Driver driver = new Driver( - new DriverContext(), + driverContext(), new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( new TopNOperator( @@ -1169,7 +1169,7 @@ public void testZeroByte() { List> actual = new ArrayList<>(); try ( Driver driver = new Driver( - new DriverContext(), + driverContext(), new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java index 86d082f1051ab..3829ed3ac3198 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java @@ -14,6 +14,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; @@ -25,6 +27,7 @@ import org.elasticsearch.compute.operator.OutputOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.transport.TransportService; @@ -145,7 +148,7 @@ protected void start(Driver driver, ActionListener listener) { Driver.start(executor, driver, between(1, 1000), listener); } }; - Driver driver = new Driver(new DriverContext(), sourceOperator, List.of(enrichOperator), outputOperator, () -> {}); + Driver driver = new Driver(driverContext(), sourceOperator, List.of(enrichOperator), outputOperator, () -> {}); PlainActionFuture future = new PlainActionFuture<>(); runner.runToCompletion(List.of(driver), future); future.actionGet(TimeValue.timeValueSeconds(30)); @@ -224,4 +227,10 @@ public void testRandom() { public void testMultipleMatches() { } + + static DriverContext driverContext() { + return new DriverContext( + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + ); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 
2d1480a947d25..df7058e28fb43 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.ElementType; @@ -96,12 +97,19 @@ public class EnrichLookupService { private final SearchService searchService; private final TransportService transportService; private final Executor executor; + private final BigArrays bigArrays; - public EnrichLookupService(ClusterService clusterService, SearchService searchService, TransportService transportService) { + public EnrichLookupService( + ClusterService clusterService, + SearchService searchService, + TransportService transportService, + BigArrays bigArrays + ) { this.clusterService = clusterService; this.searchService = searchService; this.transportService = transportService; this.executor = transportService.getThreadPool().executor(EsqlPlugin.ESQL_THREAD_POOL_NAME); + this.bigArrays = bigArrays; transportService.registerRequestHandler(LOOKUP_ACTION_NAME, this.executor, LookupRequest::new, new TransportHandler()); } @@ -200,7 +208,7 @@ private void doLookup( OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set); Driver driver = new Driver( "enrich-lookup:" + sessionId, - new DriverContext(), + new DriverContext(bigArrays), () -> lookupDescription(sessionId, shardId, matchType, matchField, extractFields, inputPage.getPositionCount()), queryOperator, intermediateOperators, diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java index aa4d9235bdb40..a518dd36e3e9e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.esql.evaluator.mapper; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.compute.operator.ThrowingDriverContext; import org.elasticsearch.xpack.ql.expression.Expression; import java.util.function.Function; @@ -31,7 +31,7 @@ public interface EvaluatorMapper { */ default Object fold() { return toJavaObject( - toEvaluator(e -> driverContext -> p -> fromArrayRow(e.fold())[0]).get(DriverContext.DEFAULT).eval(new Page(1)), + toEvaluator(e -> driverContext -> p -> fromArrayRow(e.fold())[0]).get(new ThrowingDriverContext()).eval(new Page(1)), 0 ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 614277e9d7216..6d859ef857bc7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -65,7 +65,7 @@ public TransportEsqlQueryAction( this.requestExecutor = threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME); exchangeService.registerTransportHandler(transportService); this.exchangeService = exchangeService; - this.enrichLookupService = new EnrichLookupService(clusterService, searchService, 
transportService); + this.enrichLookupService = new EnrichLookupService(clusterService, searchService, transportService, bigArrays); this.computeService = new ComputeService( searchService, transportService, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 80aeb25d49d4a..017034eba9c64 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -10,6 +10,8 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.Page; @@ -17,6 +19,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.PathUtils; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.logging.LogManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; @@ -180,7 +183,7 @@ public final void testEvaluate() { expression = new FoldNull().rule(expression); assertThat(expression.dataType(), equalTo(testCase.expectedType)); // TODO should we convert unsigned_long into BigDecimal so it's easier to assert? 
- Object result = toJavaObject(evaluator(expression).get(new DriverContext()).eval(row(testCase.getDataValues())), 0); + Object result = toJavaObject(evaluator(expression).get(driverContext()).eval(row(testCase.getDataValues())), 0); assertThat(result, not(equalTo(Double.NaN))); assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); @@ -194,7 +197,7 @@ public final void testSimpleWithNulls() { // TODO replace this with nulls insert assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); List simpleData = testCase.getDataValues(); - EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(new DriverContext()); + EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(driverContext()); Block[] orig = BlockUtils.fromListRow(simpleData); for (int i = 0; i < orig.length; i++) { List data = new ArrayList<>(); @@ -231,7 +234,7 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru Page page = row(simpleData); futures.add(exec.submit(() -> { - EvalOperator.ExpressionEvaluator eval = evalSupplier.get(new DriverContext()); + EvalOperator.ExpressionEvaluator eval = evalSupplier.get(driverContext()); for (int c = 0; c < count; c++) { assertThat(toJavaObject(eval.eval(page), 0), testCase.getMatcher()); } @@ -249,7 +252,7 @@ public final void testEvaluatorToString() { assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); var supplier = evaluator(buildFieldExpression(testCase)); - var ev = supplier.get(new DriverContext()); + var ev = supplier.get(driverContext()); assertThat(ev.toString(), equalTo(testCase.evaluatorToString)); } @@ 
-608,4 +611,13 @@ private static void writeToTempDir(String subdir, String str, String extension) Path file = dir.resolve(functionName() + "." + extension); Files.writeString(file, str); } + + /** + * A {@link DriverContext} with a BigArrays that does not circuit break. + */ + protected DriverContext driverContext() { + return new DriverContext( + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + ); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 79138679e2414..68cea4ea873a5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -89,9 +88,7 @@ protected Expression build(Source source, List args) { public void testEvalCase() { testCase( caseExpr -> toJavaObject( - caseExpr.toEvaluator(child -> evaluator(child)) - .get(new DriverContext()) - .eval(new Page(IntBlock.newConstantBlockWith(0, 1))), + caseExpr.toEvaluator(child -> evaluator(child)).get(driverContext()).eval(new Page(IntBlock.newConstantBlockWith(0, 1))), 0 ) ); @@ -157,7 +154,7 @@ public void testCaseIsLazy() { }; } return evaluator(child); - }).get(new DriverContext()).eval(new Page(IntBlock.newConstantBlockWith(0, 
1))), 0)); + }).get(driverContext()).eval(new Page(IntBlock.newConstantBlockWith(0, 1))), 0)); } private static Case caseExpr(Object... args) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index cbfb0d6a579fe..853fe44d12ec9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -86,14 +85,14 @@ public void testExamples() { private Object process(Number val) { return toJavaObject( - evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), null)).get(new DriverContext()).eval(row(List.of(val))), + evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), null)).get(driverContext()).eval(row(List.of(val))), 0 ); } private Object process(Number val, int decimals) { return toJavaObject( - evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), field("decimals", DataTypes.INTEGER))).get(new DriverContext()) + evaluator(new Round(Source.EMPTY, field("val", typeOf(val)), field("decimals", DataTypes.INTEGER))).get(driverContext()) .eval(row(List.of(val, decimals))), 0 ); @@ -119,7 +118,7 @@ protected DataType expectedType(List argTypes) { public void testNoDecimalsToString() { assertThat( - evaluator(new Round(Source.EMPTY, field("val", DataTypes.DOUBLE), 
null)).get(new DriverContext()).toString(), + evaluator(new Round(Source.EMPTY, field("val", DataTypes.DOUBLE), null)).get(driverContext()).toString(), equalTo("RoundDoubleNoDecimalsEvaluator[val=Attribute[channel=0]]") ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index a300dbb383211..714112b2db543 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -13,7 +13,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -445,7 +444,7 @@ private void testBlock(boolean insertNulls) { builder.copyFrom(oneRowBlock, 0, 1); } Block input = builder.build(); - Block result = evaluator(buildFieldExpression(testCase)).get(new DriverContext()).eval(new Page(input)); + Block result = evaluator(buildFieldExpression(testCase)).get(driverContext()).eval(new Page(input)); assertThat(result.getPositionCount(), equalTo(result.getPositionCount())); for (int p = 0; p < input.getPositionCount(); p++) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java index f6082af0e142e..e7670c9840b91 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java @@ -72,7 +72,7 @@ public void testNull() { BytesRef bar = new BytesRef("bar"); BytesRef delim = new BytesRef(";"); Expression expression = buildFieldExpression(testCase); - DriverContext dvrCtx = new DriverContext(); + DriverContext dvrCtx = driverContext(); assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(Arrays.asList(foo, bar), null))), 0), nullValue()); assertThat(toJavaObject(evaluator(expression).get(dvrCtx).eval(row(Arrays.asList(foo, null))), 0), nullValue()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index b2345e85336d4..a70fbf45ab4fd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -11,7 +11,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -88,7 +87,7 @@ public void testCoalesceIsLazy() { return dvrCtx -> page -> { throw new AssertionError("shouldn't be called"); }; } return 
EvalMapper.toEvaluator(child, layout); - }).get(new DriverContext()).eval(row(testCase.getDataValues())), 0), testCase.getMatcher()); + }).get(driverContext()).eval(row(testCase.getDataValues())), 0), testCase.getMatcher()); } public void testCoalesceNullabilityIsUnknown() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java index 7d5c18a5e3fd6..39d328747199d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java @@ -11,7 +11,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -99,7 +98,7 @@ public void testMany() { field("a", DataTypes.KEYWORD), IntStream.range(1, 5).mapToObj(i -> field(Integer.toString(i), DataTypes.KEYWORD)).toList() ) - ).get(new DriverContext()).eval(row(simpleData)), + ).get(driverContext()).eval(row(simpleData)), 0 ), equalTo(new BytesRef("cats and dogs")) @@ -121,7 +120,7 @@ public void testSomeConstant() { field("c", DataTypes.KEYWORD) ) ) - ).get(new DriverContext()).eval(row(simpleData)), + ).get(driverContext()).eval(row(simpleData)), 0 ), equalTo(new BytesRef("cats and dogs")) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java index 
77807e6463324..3d9e8d677f3e4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -199,7 +198,7 @@ public void testUnicode() { private String process(String str, int length) { Block result = evaluator( new Left(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, DataTypes.INTEGER)) - ).get(new DriverContext()).eval(row(List.of(new BytesRef(str)))); + ).get(driverContext()).eval(row(List.of(new BytesRef(str)))); if (null == result) { return null; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java index 39222386a0cb0..ca9d1ef2dc1ee 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -201,7 +200,7 @@ public void 
testUnicode() { private String process(String str, int length) { Block result = evaluator( new Right(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, DataTypes.INTEGER)) - ).get(new DriverContext()).eval(row(List.of(new BytesRef(str)))); + ).get(driverContext()).eval(row(List.of(new BytesRef(str)))); if (null == result) { return null; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index fc426be21e3f6..27b8ed722f963 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -86,7 +85,7 @@ protected Expression build(Source source, List args) { public void testConstantDelimiter() { EvalOperator.ExpressionEvaluator eval = evaluator( new Split(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, new BytesRef(":"), DataTypes.KEYWORD)) - ).get(new DriverContext()); + ).get(driverContext()); /* * 58 is ascii for : and appears in the toString below. We don't convert the delimiter to a * string because we aren't really sure it's printable. 
It could be a tab or a bell or some diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index 19113ed65ffbe..5730b93aecd8d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; @@ -68,7 +67,7 @@ public Matcher resultsMatcher(List typedData public void testNoLengthToString() { assertThat( evaluator(new Substring(Source.EMPTY, field("str", DataTypes.KEYWORD), field("start", DataTypes.INTEGER), null)).get( - new DriverContext() + driverContext() ).toString(), equalTo("SubstringNoLengthEvaluator[str=Attribute[channel=0], start=Attribute[channel=1]]") ); @@ -137,7 +136,7 @@ private String process(String str, int start, Integer length) { new Literal(Source.EMPTY, start, DataTypes.INTEGER), length == null ? null : new Literal(Source.EMPTY, length, DataTypes.INTEGER) ) - ).get(new DriverContext()).eval(row(List.of(new BytesRef(str)))); + ).get(driverContext()).eval(row(List.of(new BytesRef(str)))); return result == null ? 
null : ((BytesRef) toJavaObject(result, 0)).utf8ToString(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java index 0ac08b61ec39e..3d5e7820677e4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -93,7 +92,7 @@ public final void testApplyToAllTypes() { Source src = new Source(Location.EMPTY, lhsType.typeName() + " " + rhsType.typeName()); if (isRepresentable(lhsType) && isRepresentable(rhsType)) { op = build(src, field("lhs", lhsType), field("rhs", rhsType)); - result = toJavaObject(evaluator(op).get(new DriverContext()).eval(row(List.of(lhs.value(), rhs.value()))), 0); + result = toJavaObject(evaluator(op).get(driverContext()).eval(row(List.of(lhs.value(), rhs.value()))), 0); } else { op = build(src, lhs, rhs); result = op.fold(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index f4de880bcd2b0..0138160ebd0fc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -172,7 +171,7 @@ public void testEdgeCases() { private Object process(Object val) { if (testCase.allTypesAreRepresentable()) { Neg neg = new Neg(Source.EMPTY, field("val", typeOf(val))); - return toJavaObject(evaluator(neg).get(new DriverContext()).eval(row(List.of(val))), 0); + return toJavaObject(evaluator(neg).get(driverContext()).eval(row(List.of(val))), 0); } else { // just fold if type is not representable Neg neg = new Neg(Source.EMPTY, new Literal(Source.EMPTY, val, typeOf(val))); return neg.fold(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java index 34e6670862249..7956892c34645 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java @@ -10,8 +10,11 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.xpack.esql.SerializationTestUtils; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; @@ -128,8 +131,8 @@ public void testEvaluatorSuppliers() { Layout layout = lb.build(); var supplier = EvalMapper.toEvaluator(expression, layout); - EvalOperator.ExpressionEvaluator evaluator1 = supplier.get(new DriverContext()); - EvalOperator.ExpressionEvaluator evaluator2 = supplier.get(new DriverContext()); + EvalOperator.ExpressionEvaluator evaluator1 = supplier.get(driverContext()); + EvalOperator.ExpressionEvaluator evaluator2 = supplier.get(driverContext()); assertNotNull(evaluator1); assertNotNull(evaluator2); assertTrue(evaluator1 != evaluator2); @@ -143,4 +146,10 @@ public void testExpressionSerialization() { private static FieldAttribute field(String name, DataType type) { return new FieldAttribute(Source.EMPTY, name, new EsField(name, type, Collections.emptyMap(), false)); } + + static DriverContext driverContext() { + return new DriverContext( + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + ); + } } From 2c88a89c50da01c060c1b5e4faec4ee7a2cf098f Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 14 Sep 2023 17:35:06 +0100 Subject: [PATCH 066/114] Drop unnecessary exception mangling (#99560) Today the ESQL module uses `ListenableActionFuture` throughout, but this is just the same as `SubscribableListener` except for the way it mangles exceptions. The exception-mangling behaviour is unnecessary, so this commit removes it. 
--- .../action/support/SubscribableListener.java | 40 ++++++++++++++++++- .../support/SubscribableListenerTests.java | 17 ++++++++ .../compute/operator/AsyncOperator.java | 10 ++--- .../compute/operator/Driver.java | 20 +++++----- .../compute/operator/Operator.java | 12 ++---- .../operator/exchange/ExchangeBuffer.java | 20 +++++----- .../operator/exchange/ExchangeSink.java | 4 +- .../exchange/ExchangeSinkHandler.java | 9 ++--- .../exchange/ExchangeSinkOperator.java | 4 +- .../operator/exchange/ExchangeSource.java | 4 +- .../exchange/ExchangeSourceHandler.java | 8 ++-- .../exchange/ExchangeSourceOperator.java | 6 +-- .../compute/operator/AsyncOperatorTests.java | 4 +- .../exchange/ExchangeServiceTests.java | 4 +- .../xpack/esql/plugin/ComputeService.java | 4 +- 15 files changed, 106 insertions(+), 60 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java index 96b54a951ccc9..5ba43111b4f03 100644 --- a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; @@ -29,7 +30,7 @@ /** * An {@link ActionListener} to which other {@link ActionListener} instances can subscribe, such that when this listener is completed it * fans-out its result to the subscribed listeners. - * + *

* Similar to {@link ListenableActionFuture} and {@link ListenableFuture} except for its handling of exceptions: if this listener is * completed exceptionally then the exception is passed to subscribed listeners without modification. */ @@ -38,6 +39,41 @@ public class SubscribableListener implements ActionListener { private static final Logger logger = LogManager.getLogger(SubscribableListener.class); private static final Object EMPTY = new Object(); + /** + * Create a {@link SubscribableListener} which is incomplete. + */ + public SubscribableListener() { + this(EMPTY); + } + + /** + * Create a {@link SubscribableListener} which has already succeeded with the given result. + */ + public static SubscribableListener newSucceeded(T result) { + return new SubscribableListener<>(new SuccessResult<>(result)); + } + + /** + * Create a {@link SubscribableListener} which has already failed with the given exception. + */ + public static SubscribableListener newFailed(Exception exception) { + return new SubscribableListener<>(new FailureResult(exception, exception)); + } + + /** + * Create a {@link SubscribableListener}, fork a computation to complete it, and return the listener. If the forking itself throws an + * exception then the exception is caught and fed to the returned listener. + */ + public static SubscribableListener newForked(CheckedConsumer, ? extends Exception> fork) { + final var listener = new SubscribableListener(); + ActionListener.run(listener, fork::accept); + return listener; + } + + private SubscribableListener(Object initialState) { + state = initialState; + } + /** * If we are incomplete, {@code state} may be one of the following depending on how many waiting subscribers there are: *